From 7b448400a3f9c83c293684de9277e3506005848e Mon Sep 17 00:00:00 2001
From: Hackerry
Date: Mon, 15 Feb 2021 18:08:24 -0800
Subject: [PATCH 01/70] Finished wireframe #1 basic functionality

---
 app.js                      |  4 ++--
 public/css/add-activity.css | 42 +++++++++++++++++++++++++++++++++++++
 routes/add.js               |  8 +++++++
 views/add.handlebars        | 39 ++++++++++++++++++++++++++++++++++
 4 files changed, 91 insertions(+), 2 deletions(-)
 create mode 100644 public/css/add-activity.css
 create mode 100644 routes/add.js
 create mode 100644 views/add.handlebars

diff --git a/app.js b/app.js
index edb9f73ea..f93cdb1a7 100644
--- a/app.js
+++ b/app.js
@@ -10,7 +10,7 @@ var handlebars = require('express3-handlebars')
 var index = require('./routes/index');
 
 // Example route
-// var user = require('./routes/user');
+var add = require('./routes/add');
 
 var app = express();
 
@@ -36,7 +36,7 @@ if ('development' == app.get('env')) {
 app.get('/', index.view);
 
 // Example route
-// app.get('/users', user.list);
+app.get('/add', add.view);
 
 http.createServer(app).listen(app.get('port'), function(){
   console.log('Express server listening on port ' + app.get('port'));
diff --git a/public/css/add-activity.css b/public/css/add-activity.css
new file mode 100644
index 000000000..1191ded86
--- /dev/null
+++ b/public/css/add-activity.css
@@ -0,0 +1,42 @@
+* {
+    padding: 0;
+    margin: 0;
+}
+#back-arrow {
+    display: block;
+    text-align: left;
+    font-size: 2em;
+    margin-left: 10%;
+}
+#back-arrow:hover {
+    cursor: pointer;
+}
+#root-div {
+    margin: 0 auto;
+    width: 50%;
+    text-align: center;
+}
+
+#activity-form {
+    width: 50%;
+    margin: 0 auto;
+}
+.form-entry {
+    width: 60%;
+    margin: 20px auto;
+}
+#form-submit {
+    width: 60%;
+}
+
+label {
+    display: block;
+    text-align: left;
+    margin-bottom: 5px;
+}
+input, select {
+    width: 100%;
+}
+input {
+    padding: 2px 5px;
+}
\ No newline at end of file
diff --git a/routes/add.js b/routes/add.js
new file mode 100644
index 000000000..36401705a
--- /dev/null
+++ b/routes/add.js
@@ -0,0 +1,8 @@
+
+/*
+ * GET home page.
+ */
+
+exports.view = function(req, res){
+  res.render('add');
+  };
\ No newline at end of file
diff --git a/views/add.handlebars b/views/add.handlebars
new file mode 100644
index 000000000..275039f4b
--- /dev/null
+++ b/views/add.handlebars
@@ -0,0 +1,39 @@
 [The 39 added markup lines were reduced to bare "+" markers in extraction; only the heading
 text "Add Activity" survives. Judging by public/css/add-activity.css, the view comprises a
 #back-arrow control, an "Add Activity" heading inside #root-div, and an #activity-form with
 several .form-entry label/field groups plus a #form-submit button.]
+ + \ No newline at end of file From 454dcec75f06e4336b96a35edc5ddada8a95642e Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Mon, 15 Feb 2021 18:35:10 -0800 Subject: [PATCH 02/70] update A5 --- .../node_modules => }/.bin/handlebars | 0 .../node_modules => }/.bin/semver | 0 .../node_modules => }/.bin/uglifyjs | 0 .../node_modules => }/amdefine/LICENSE | 4 +- .../node_modules => }/amdefine/README.md | 2 +- .../node_modules => }/amdefine/amdefine.js | 10 +- .../node_modules => }/amdefine/intercept.js | 0 node_modules/amdefine/package.json | 48 + .../node_modules => }/async/LICENSE | 0 .../node_modules => }/async/README.md | 0 .../node_modules => }/async/component.json | 0 .../node_modules => }/async/lib/async.js | 0 node_modules/async/package.json | 65 + .../node_modules => }/batch/.npmignore | 0 .../node_modules => }/batch/History.md | 0 .../connect/node_modules => }/batch/Makefile | 0 .../connect/node_modules => }/batch/Readme.md | 0 .../node_modules => }/batch/component.json | 0 .../connect/node_modules => }/batch/index.js | 0 node_modules/batch/package.json | 39 + .../node_modules => }/bson/.travis.yml | 0 .../{mongodb/node_modules => }/bson/Makefile | 0 .../{mongodb/node_modules => }/bson/README.md | 0 .../node_modules => }/bson/binding.gyp | 0 .../bson/browser_build/bson.js | 0 .../bson/browser_build/package.json | 0 .../node_modules => }/bson/build/Makefile | 98 +- .../Release/obj.target/bson/ext/bson.o.d.raw | 19 + .../bson/build/binding.Makefile | 2 +- node_modules/bson/build/bson.target.mk | 188 ++ node_modules/bson/build/config.gypi | 203 ++ node_modules/bson/build/gyp-mac-tool | 615 +++++ .../node_modules => }/bson/build_browser.js | 0 .../node_modules => }/bson/ext/Makefile | 0 .../node_modules => }/bson/ext/bson.cc | 2090 ++++++++--------- .../node_modules => }/bson/ext/bson.h | 556 ++--- .../node_modules => }/bson/ext/index.js | 0 .../{mongodb/node_modules => }/bson/ext/nan.h | 0 .../bson/ext/win32/ia32/bson.node | Bin .../bson/ext/win32/x64/bson.node | Bin .../node_modules => }/bson/ext/wscript | 0 .../node_modules => }/bson/lib/bson/binary.js | 0 .../bson/lib/bson/binary_parser.js | 0 .../node_modules => }/bson/lib/bson/bson.js | 0 .../node_modules => }/bson/lib/bson/code.js | 0 .../node_modules => }/bson/lib/bson/db_ref.js | 0 .../node_modules => }/bson/lib/bson/double.js | 0 .../bson/lib/bson/float_parser.js | 0 .../node_modules => }/bson/lib/bson/index.js | 0 .../node_modules => }/bson/lib/bson/long.js | 0 .../bson/lib/bson/max_key.js | 0 .../bson/lib/bson/min_key.js | 0 .../bson/lib/bson/objectid.js | 0 .../node_modules => }/bson/lib/bson/symbol.js | 0 .../bson/lib/bson/timestamp.js | 0 node_modules/bson/package.json | 74 + .../node_modules => }/bson/tools/gleak.js | 0 .../bson/tools/jasmine-1.1.0/MIT.LICENSE | 0 .../bson/tools/jasmine-1.1.0/jasmine-html.js | 0 .../bson/tools/jasmine-1.1.0/jasmine.css | 0 .../bson/tools/jasmine-1.1.0/jasmine.js | 0 .../tools/jasmine-1.1.0/jasmine_favicon.png | Bin .../node_modules => }/buffer-crc32/.npmignore | 0 .../buffer-crc32/.travis.yml | 0 .../node_modules => }/buffer-crc32/README.md | 0 .../node_modules => }/buffer-crc32/index.js | 0 node_modules/buffer-crc32/package.json | 61 + .../buffer-crc32/tests/crc.test.js | 0 .../node_modules => }/bytes/.npmignore | 0 .../node_modules => }/bytes/History.md | 0 .../connect/node_modules => }/bytes/Makefile | 0 .../connect/node_modules => }/bytes/Readme.md | 0 .../node_modules => }/bytes/component.json | 0 .../connect/node_modules => }/bytes/index.js | 0 node_modules/bytes/package.json | 47 + 
.../node_modules => }/commander/History.md | 0 .../node_modules => }/commander/Readme.md | 0 .../node_modules => }/commander/index.js | 0 node_modules/commander/package.json | 62 + .../node_modules => }/connect/.npmignore | 0 .../node_modules => }/connect/.travis.yml | 0 .../node_modules => }/connect/LICENSE | 0 .../node_modules => }/connect/Readme.md | 0 .../node_modules => }/connect/index.js | 0 .../node_modules => }/connect/lib/cache.js | 0 .../node_modules => }/connect/lib/connect.js | 0 .../node_modules => }/connect/lib/index.js | 0 .../connect/lib/middleware/basicAuth.js | 0 .../connect/lib/middleware/bodyParser.js | 0 .../connect/lib/middleware/compress.js | 0 .../connect/lib/middleware/cookieParser.js | 0 .../connect/lib/middleware/cookieSession.js | 0 .../connect/lib/middleware/csrf.js | 0 .../connect/lib/middleware/directory.js | 0 .../connect/lib/middleware/errorHandler.js | 0 .../connect/lib/middleware/favicon.js | 0 .../connect/lib/middleware/json.js | 0 .../connect/lib/middleware/limit.js | 0 .../connect/lib/middleware/logger.js | 0 .../connect/lib/middleware/methodOverride.js | 0 .../connect/lib/middleware/multipart.js | 0 .../connect/lib/middleware/query.js | 0 .../connect/lib/middleware/responseTime.js | 0 .../connect/lib/middleware/session.js | 0 .../connect/lib/middleware/session/cookie.js | 0 .../connect/lib/middleware/session/memory.js | 0 .../connect/lib/middleware/session/session.js | 0 .../connect/lib/middleware/session/store.js | 0 .../connect/lib/middleware/static.js | 0 .../connect/lib/middleware/staticCache.js | 0 .../connect/lib/middleware/timeout.js | 0 .../connect/lib/middleware/urlencoded.js | 0 .../connect/lib/middleware/vhost.js | 0 .../node_modules => }/connect/lib/patch.js | 0 .../node_modules => }/connect/lib/proto.js | 0 .../connect/lib/public/directory.html | 0 .../connect/lib/public/error.html | 0 .../connect/lib/public/favicon.ico | Bin .../connect/lib/public/icons/folder.png | Bin .../connect/lib/public/icons/page.png | Bin .../connect/lib/public/icons/page_add.png | Bin .../connect/lib/public/icons/page_attach.png | Bin .../connect/lib/public/icons/page_code.png | Bin .../connect/lib/public/icons/page_copy.png | Bin .../connect/lib/public/icons/page_delete.png | Bin .../connect/lib/public/icons/page_edit.png | Bin .../connect/lib/public/icons/page_error.png | Bin .../connect/lib/public/icons/page_excel.png | Bin .../connect/lib/public/icons/page_find.png | Bin .../connect/lib/public/icons/page_gear.png | Bin .../connect/lib/public/icons/page_go.png | Bin .../connect/lib/public/icons/page_green.png | Bin .../connect/lib/public/icons/page_key.png | Bin .../lib/public/icons/page_lightning.png | Bin .../connect/lib/public/icons/page_link.png | Bin .../lib/public/icons/page_paintbrush.png | Bin .../connect/lib/public/icons/page_paste.png | Bin .../connect/lib/public/icons/page_red.png | Bin .../connect/lib/public/icons/page_refresh.png | Bin .../connect/lib/public/icons/page_save.png | Bin .../connect/lib/public/icons/page_white.png | Bin .../lib/public/icons/page_white_acrobat.png | Bin .../public/icons/page_white_actionscript.png | Bin .../lib/public/icons/page_white_add.png | Bin .../connect/lib/public/icons/page_white_c.png | Bin .../lib/public/icons/page_white_camera.png | Bin .../lib/public/icons/page_white_cd.png | Bin .../lib/public/icons/page_white_code.png | Bin .../lib/public/icons/page_white_code_red.png | Bin .../public/icons/page_white_coldfusion.png | Bin .../public/icons/page_white_compressed.png | Bin 
.../lib/public/icons/page_white_copy.png | Bin .../lib/public/icons/page_white_cplusplus.png | Bin .../lib/public/icons/page_white_csharp.png | Bin .../lib/public/icons/page_white_cup.png | Bin .../lib/public/icons/page_white_database.png | Bin .../lib/public/icons/page_white_delete.png | Bin .../lib/public/icons/page_white_dvd.png | Bin .../lib/public/icons/page_white_edit.png | Bin .../lib/public/icons/page_white_error.png | Bin .../lib/public/icons/page_white_excel.png | Bin .../lib/public/icons/page_white_find.png | Bin .../lib/public/icons/page_white_flash.png | Bin .../lib/public/icons/page_white_freehand.png | Bin .../lib/public/icons/page_white_gear.png | Bin .../lib/public/icons/page_white_get.png | Bin .../lib/public/icons/page_white_go.png | Bin .../connect/lib/public/icons/page_white_h.png | Bin .../public/icons/page_white_horizontal.png | Bin .../lib/public/icons/page_white_key.png | Bin .../lib/public/icons/page_white_lightning.png | Bin .../lib/public/icons/page_white_link.png | Bin .../lib/public/icons/page_white_magnify.png | Bin .../lib/public/icons/page_white_medal.png | Bin .../lib/public/icons/page_white_office.png | Bin .../lib/public/icons/page_white_paint.png | Bin .../public/icons/page_white_paintbrush.png | Bin .../lib/public/icons/page_white_paste.png | Bin .../lib/public/icons/page_white_php.png | Bin .../lib/public/icons/page_white_picture.png | Bin .../public/icons/page_white_powerpoint.png | Bin .../lib/public/icons/page_white_put.png | Bin .../lib/public/icons/page_white_ruby.png | Bin .../lib/public/icons/page_white_stack.png | Bin .../lib/public/icons/page_white_star.png | Bin .../lib/public/icons/page_white_swoosh.png | Bin .../lib/public/icons/page_white_text.png | Bin .../public/icons/page_white_text_width.png | Bin .../lib/public/icons/page_white_tux.png | Bin .../lib/public/icons/page_white_vector.png | Bin .../public/icons/page_white_visualstudio.png | Bin .../lib/public/icons/page_white_width.png | Bin .../lib/public/icons/page_white_word.png | Bin .../lib/public/icons/page_white_world.png | Bin .../lib/public/icons/page_white_wrench.png | Bin .../lib/public/icons/page_white_zip.png | Bin .../connect/lib/public/icons/page_word.png | Bin .../connect/lib/public/icons/page_world.png | Bin .../connect/lib/public/style.css | 0 .../node_modules => }/connect/lib/utils.js | 0 node_modules/connect/package.json | 86 + .../pause => cookie-signature}/.npmignore | 0 .../cookie-signature/History.md | 0 .../pause => cookie-signature}/Makefile | 0 .../cookie-signature/Readme.md | 0 .../cookie-signature/index.js | 0 node_modules/cookie-signature/package.json | 46 + .../node_modules => }/cookie/.npmignore | 0 .../node_modules => }/cookie/.travis.yml | 0 .../{express/node_modules => }/cookie/LICENSE | 0 .../node_modules => }/cookie/README.md | 0 .../node_modules => }/cookie/index.js | 0 node_modules/cookie/package.json | 59 + .../node_modules => }/cookie/test/mocha.opts | 0 .../node_modules => }/cookie/test/parse.js | 0 .../cookie/test/serialize.js | 0 .../debuglog => core-util-is}/LICENSE | 2 +- .../node_modules => }/core-util-is/README.md | 0 .../core-util-is/float.patch | 0 .../core-util-is => core-util-is/lib}/util.js | 19 +- node_modules/core-util-is/package.json | 62 + node_modules/core-util-is/test.js | 68 + .../node_modules => }/debug/Readme.md | 17 +- .../{express/node_modules => }/debug/debug.js | 0 .../node_modules => }/debug/lib/debug.js | 39 +- node_modules/debug/package.json | 67 + node_modules/express/benchmarks/run | 0 node_modules/express/bin/express 
| 0 .../node_modules/buffer-crc32/package.json | 43 - .../node_modules/keypress/package.json | 32 - .../node_modules/commander/package.json | 45 - .../connect/node_modules/batch/package.json | 22 - .../connect/node_modules/bytes/package.json | 29 - .../node_modules/readable-stream/README.md | 768 ------ .../node_modules/core-util-is/lib/util.js | 107 - .../node_modules/core-util-is/package.json | 35 - .../node_modules/debuglog/README.md | 40 - .../node_modules/debuglog/debuglog.js | 22 - .../node_modules/debuglog/package.json | 30 - .../node_modules/readable-stream/package.json | 42 - .../node_modules/stream-counter/package.json | 32 - .../node_modules/multiparty/package.json | 47 - .../node_modules/negotiator/package.json | 54 - .../connect/node_modules/pause/package.json | 24 - .../connect/node_modules/qs/package.json | 42 - .../node_modules/raw-body/package.json | 45 - .../connect/node_modules/uid2/package.json | 16 - .../express/node_modules/connect/package.json | 69 - .../cookie-signature/package.json | 28 - .../express/node_modules/cookie/package.json | 41 - .../express/node_modules/debug/index.js | 5 - .../express/node_modules/debug/package.json | 46 - .../express/node_modules/fresh/package.json | 32 - .../merge-descriptors/package.json | 30 - .../express/node_modules/methods/package.json | 33 - .../express/node_modules/mkdirp/package.json | 38 - .../node_modules/range-parser/package.json | 24 - .../send/node_modules/mime/package.json | 36 - .../express/node_modules/send/package.json | 46 - node_modules/express/package.json | 87 +- node_modules/express3-handlebars/.npmignore 2 | 2 + node_modules/express3-handlebars/HISTORY 2.md | 149 ++ node_modules/express3-handlebars/LICENSE 2 | 27 + node_modules/express3-handlebars/README 2.md | 612 +++++ .../examples/advanced/app 2.js | 99 + node_modules/express3-handlebars/index 2.js | 13 + .../lib/express-handlebars 2.js | 367 +++ .../node_modules/async/package.json | 47 - .../glob/node_modules/inherits/inherits.js | 1 - .../node_modules/inherits/inherits_browser.js | 23 - .../glob/node_modules/inherits/package.json | 33 - .../glob/node_modules/inherits/test.js | 25 - .../minimatch/node_modules/lru-cache/LICENSE | 23 - .../node_modules/lru-cache/package.json | 33 - .../node_modules/lru-cache/test/foreach.js | 52 - .../node_modules/sigmund/package.json | 42 - .../glob/node_modules/minimatch/package.json | 40 - .../node_modules/glob/package.json | 39 - .../node_modules/wordwrap/package.json | 45 - .../node_modules/optimist/package.json | 46 - .../node_modules/source-map/CHANGELOG.md | 112 - .../node_modules/amdefine/package.json | 36 - .../node_modules/source-map/package.json | 110 - .../node_modules/uglify-js/package.json | 39 - .../node_modules/handlebars/package.json | 70 - .../node_modules/semver/LICENSE | 27 - .../node_modules/semver/package.json | 32 - .../node_modules/semver/semver.min.js | 1 - .../express3-handlebars/package 2.json | 46 + node_modules/express3-handlebars/package.json | 67 +- .../node_modules => }/fresh/.npmignore | 0 .../node_modules => }/fresh/History.md | 0 .../{express/node_modules => }/fresh/Makefile | 0 .../node_modules => }/fresh/Readme.md | 0 .../{express/node_modules => }/fresh/index.js | 0 node_modules/fresh/package.json | 51 + .../node_modules => }/glob/.npmignore | 0 .../node_modules => }/glob/.travis.yml | 0 .../node_modules => }/glob/LICENSE | 0 .../node_modules => }/glob/README.md | 0 .../node_modules => }/glob/examples/g.js | 0 .../glob/examples/usr-local.js | 0 .../node_modules => }/glob/glob.js | 130 +- 
node_modules/glob/package.json | 61 + .../node_modules => }/glob/test/00-setup.js | 0 .../glob/test/bash-comparison.js | 0 .../glob/test/bash-results.json | 1 + .../node_modules => }/glob/test/cwd-test.js | 0 .../glob/test/globstar-match.js | 0 .../node_modules => }/glob/test/mark.js | 36 + .../glob/test/new-glob-optional-options.js | 0 .../glob/test/nocase-nomagic.js | 0 .../glob/test/pause-resume.js | 0 node_modules/glob/test/readme-issue.js | 36 + .../glob/test/root-nomount.js | 0 .../node_modules => }/glob/test/root.js | 0 .../node_modules => }/glob/test/stat.js | 0 .../node_modules => }/glob/test/zz-cleanup.js | 0 .../node_modules => }/handlebars/.npmignore | 0 .../node_modules => }/handlebars/LICENSE | 0 .../handlebars/README.markdown | 0 .../handlebars/bin/handlebars | 0 .../handlebars/dist/amd/handlebars.js | 0 .../handlebars/dist/amd/handlebars.runtime.js | 0 .../handlebars/dist/amd/handlebars/base.js | 0 .../dist/amd/handlebars/compiler/ast.js | 0 .../dist/amd/handlebars/compiler/base.js | 0 .../dist/amd/handlebars/compiler/compiler.js | 0 .../compiler/javascript-compiler.js | 0 .../dist/amd/handlebars/compiler/parser.js | 0 .../dist/amd/handlebars/compiler/printer.js | 0 .../dist/amd/handlebars/compiler/visitor.js | 0 .../dist/amd/handlebars/exception.js | 0 .../handlebars/dist/amd/handlebars/runtime.js | 0 .../dist/amd/handlebars/safe-string.js | 0 .../handlebars/dist/amd/handlebars/utils.js | 0 .../handlebars/dist/cjs/handlebars.js | 0 .../handlebars/dist/cjs/handlebars.runtime.js | 0 .../handlebars/dist/cjs/handlebars/base.js | 0 .../dist/cjs/handlebars/compiler/ast.js | 0 .../dist/cjs/handlebars/compiler/base.js | 0 .../dist/cjs/handlebars/compiler/compiler.js | 0 .../compiler/javascript-compiler.js | 0 .../dist/cjs/handlebars/compiler/parser.js | 0 .../dist/cjs/handlebars/compiler/printer.js | 0 .../dist/cjs/handlebars/compiler/visitor.js | 0 .../dist/cjs/handlebars/exception.js | 0 .../handlebars/dist/cjs/handlebars/runtime.js | 0 .../dist/cjs/handlebars/safe-string.js | 0 .../handlebars/dist/cjs/handlebars/utils.js | 0 .../handlebars/dist/handlebars.amd.js | 0 .../handlebars/dist/handlebars.amd.min.js | 0 .../handlebars/dist/handlebars.js | 0 .../handlebars/dist/handlebars.min.js | 0 .../handlebars/dist/handlebars.runtime.amd.js | 0 .../dist/handlebars.runtime.amd.min.js | 0 .../handlebars/dist/handlebars.runtime.js | 0 .../handlebars/dist/handlebars.runtime.min.js | 0 .../handlebars/lib/handlebars.js | 0 .../handlebars/lib/handlebars.runtime.js | 0 .../handlebars/lib/handlebars/base.js | 0 .../handlebars/lib/handlebars/compiler/ast.js | 0 .../lib/handlebars/compiler/base.js | 0 .../lib/handlebars/compiler/compiler.js | 0 .../compiler/javascript-compiler.js | 0 .../lib/handlebars/compiler/parser.js | 0 .../lib/handlebars/compiler/printer.js | 0 .../lib/handlebars/compiler/visitor.js | 0 .../handlebars/lib/handlebars/exception.js | 0 .../handlebars/lib/handlebars/runtime.js | 0 .../handlebars/lib/handlebars/safe-string.js | 0 .../handlebars/lib/handlebars/utils.js | 0 .../node_modules => }/handlebars/lib/index.js | 0 node_modules/handlebars/package.json | 91 + .../handlebars/release-notes.md | 0 .../node_modules => }/handlebars/runtime.js | 0 .../node_modules => }/hooks/.npmignore | 0 .../node_modules => }/hooks/Makefile | 0 .../node_modules => }/hooks/README.md | 0 .../node_modules => }/hooks/hooks.alt.js | 0 .../node_modules => }/hooks/hooks.js | 0 node_modules/hooks/package.json | 70 + .../{mongoose/node_modules => }/hooks/test.js | 0 .../glob/node_modules => 
}/inherits/LICENSE | 0 .../glob/node_modules => }/inherits/README.md | 0 node_modules/inherits/inherits.js | 9 + node_modules/inherits/inherits_browser.js | 27 + node_modules/inherits/package.json | 62 + node_modules/isarray/README.md | 54 + node_modules/isarray/build/build.js | 209 ++ node_modules/isarray/component.json | 19 + node_modules/isarray/index.js | 3 + node_modules/isarray/package.json | 57 + .../node_modules => }/kerberos/LICENSE | 0 .../node_modules => }/kerberos/README.md | 0 .../node_modules => }/kerberos/binding.gyp | 0 .../node_modules => }/kerberos/build/Makefile | 98 +- .../obj.target/kerberos/lib/kerberos.o.d.raw | 12 + .../kerberos/build/binding.Makefile | 2 +- node_modules/kerberos/build/config.gypi | 203 ++ node_modules/kerberos/build/gyp-mac-tool | 615 +++++ .../kerberos/build/kerberos.target.mk | 204 ++ .../node_modules => }/kerberos/index.js | 0 .../kerberos/lib/auth_processes/mongodb.js | 0 .../node_modules => }/kerberos/lib/base64.c | 0 .../node_modules => }/kerberos/lib/base64.h | 0 .../kerberos/lib/kerberos.cc | 0 .../node_modules => }/kerberos/lib/kerberos.h | 0 .../kerberos/lib/kerberos.js | 0 .../kerberos/lib/kerberos_context.cc | 0 .../kerberos/lib/kerberos_context.h | 0 .../kerberos/lib/kerberosgss.c | 0 .../kerberos/lib/kerberosgss.h | 0 .../node_modules => }/kerberos/lib/sspi.js | 0 .../kerberos/lib/win32/base64.c | 0 .../kerberos/lib/win32/base64.h | 0 .../kerberos/lib/win32/kerberos.cc | 0 .../kerberos/lib/win32/kerberos.h | 0 .../kerberos/lib/win32/kerberos_sspi.c | 0 .../kerberos/lib/win32/kerberos_sspi.h | 0 .../kerberos/lib/win32/worker.cc | 0 .../kerberos/lib/win32/worker.h | 0 .../lib/win32/wrappers/security_buffer.cc | 0 .../lib/win32/wrappers/security_buffer.h | 0 .../lib/win32/wrappers/security_buffer.js | 0 .../wrappers/security_buffer_descriptor.cc | 0 .../wrappers/security_buffer_descriptor.h | 0 .../wrappers/security_buffer_descriptor.js | 0 .../lib/win32/wrappers/security_context.cc | 0 .../lib/win32/wrappers/security_context.h | 0 .../lib/win32/wrappers/security_context.js | 0 .../win32/wrappers/security_credentials.cc | 0 .../lib/win32/wrappers/security_credentials.h | 0 .../win32/wrappers/security_credentials.js | 0 .../node_modules => }/kerberos/lib/worker.cc | 0 .../node_modules => }/kerberos/lib/worker.h | 0 node_modules/kerberos/package.json | 57 + .../kerberos/test/kerberos_tests.js | 0 .../kerberos/test/kerberos_win32_test.js | 0 .../win32/security_buffer_descriptor_tests.js | 0 .../test/win32/security_buffer_tests.js | 0 .../test/win32/security_credentials_tests.js | 0 .../node_modules => }/keypress/README.md | 0 .../node_modules => }/keypress/index.js | 0 node_modules/keypress/package.json | 53 + .../node_modules => }/keypress/test.js | 0 .../multiparty => lru-cache}/.npmignore | 0 node_modules/lru-cache/.travis.yml | 8 + .../node_modules => }/lru-cache/CONTRIBUTORS | 0 node_modules/lru-cache/LICENSE | 15 + .../node_modules => }/lru-cache/README.md | 46 +- .../lru-cache/lib/lru-cache.js | 114 +- node_modules/lru-cache/package.json | 56 + .../node_modules => }/lru-cache/test/basic.js | 79 +- node_modules/lru-cache/test/foreach.js | 120 + .../lru-cache/test/memory-leak.js | 1 + node_modules/lru-cache/test/serialize.js | 216 ++ .../merge-descriptors/.npmignore | 0 .../merge-descriptors/README.md | 0 .../merge-descriptors/component.json | 0 .../merge-descriptors/index.js | 0 node_modules/merge-descriptors/package.json | 47 + .../node_modules => }/methods/History.md | 0 .../node_modules => }/methods/Readme.md | 0 .../node_modules 
=> }/methods/index.js | 0 node_modules/methods/package.json | 51 + .../send/node_modules => }/mime/LICENSE | 0 .../send/node_modules => }/mime/README.md | 0 .../send/node_modules => }/mime/mime.js | 0 node_modules/mime/package.json | 57 + .../send/node_modules => }/mime/test.js | 0 .../node_modules => }/mime/types/mime.types | 0 .../node_modules => }/mime/types/node.types | 0 .../node_modules => }/minimatch/.npmignore | 0 .../glob/node_modules => }/minimatch/LICENSE | 0 .../node_modules => }/minimatch/README.md | 4 +- .../node_modules => }/minimatch/minimatch.js | 14 +- node_modules/minimatch/package.json | 61 + .../node_modules => }/minimatch/test/basic.js | 0 .../minimatch/test/brace-expand.js | 0 .../minimatch/test/caching.js | 0 .../minimatch/test/defaults.js | 2 +- .../test/extglob-ending-with-state-char.js | 0 .../node_modules => }/mkdirp/.npmignore | 0 .../node_modules => }/mkdirp/.travis.yml | 0 .../{express/node_modules => }/mkdirp/LICENSE | 0 .../node_modules => }/mkdirp/examples/pow.js | 0 .../node_modules => }/mkdirp/index.js | 0 node_modules/mkdirp/package.json | 55 + .../node_modules => }/mkdirp/readme.markdown | 0 .../node_modules => }/mkdirp/test/chmod.js | 0 .../node_modules => }/mkdirp/test/clobber.js | 0 .../node_modules => }/mkdirp/test/mkdirp.js | 0 .../node_modules => }/mkdirp/test/perm.js | 0 .../mkdirp/test/perm_sync.js | 0 .../node_modules => }/mkdirp/test/race.js | 0 .../node_modules => }/mkdirp/test/rel.js | 0 .../node_modules => }/mkdirp/test/return.js | 0 .../mkdirp/test/return_sync.js | 0 .../node_modules => }/mkdirp/test/root.js | 0 .../node_modules => }/mkdirp/test/sync.js | 0 .../node_modules => }/mkdirp/test/umask.js | 0 .../mkdirp/test/umask_sync.js | 0 node_modules/mongodb/index.js | 0 .../build/Release/.deps/Release/bson.node.d | 1 - .../.deps/Release/obj.target/bson.node.d | 1 - .../Release/obj.target/bson/ext/bson.o.d | 29 - .../node_modules/bson/build/Release/bson.node | Bin 65587 -> 0 bytes .../bson/build/Release/linker.lock | 0 .../bson/build/Release/obj.target/bson.node | Bin 65587 -> 0 bytes .../build/Release/obj.target/bson/ext/bson.o | Bin 55560 -> 0 bytes .../node_modules/bson/build/bson.target.mk | 126 - .../node_modules/bson/build/config.gypi | 115 - .../mongodb/node_modules/bson/package.json | 57 - .../node_modules/kerberos/build/Makefile | 332 --- .../Release/.deps/Release/kerberos.node.d | 1 - .../.deps/Release/obj.target/kerberos.node.d | 1 - .../kerberos/build/Release/kerberos.node | Bin 6608 -> 0 bytes .../kerberos/build/Release/linker.lock | 0 .../build/Release/obj.target/kerberos.node | Bin 6608 -> 0 bytes .../kerberos/build/binding.Makefile | 6 - .../node_modules/kerberos/build/config.gypi | 115 - .../kerberos/build/kerberos.target.mk | 42 - .../node_modules/kerberos/package.json | 39 - node_modules/mongodb/package.json | 96 +- node_modules/mongoose/contRun.sh | 0 .../mongoose/node_modules/hooks/package.json | 53 - .../mongoose/node_modules/mpath/package.json | 39 - .../node_modules/mpromise/package.json | 42 - .../mquery/node_modules/debug/package.json | 36 - .../build/Release/.deps/Release/bson.node.d | 1 - .../.deps/Release/obj.target/bson.node.d | 1 - .../Release/obj.target/bson/ext/bson.o.d | 28 - .../node_modules/bson/build/Release/bson.node | Bin 57156 -> 0 bytes .../bson/build/Release/linker.lock | 0 .../bson/build/Release/obj.target/bson.node | Bin 57156 -> 0 bytes .../build/Release/obj.target/bson/ext/bson.o | Bin 49084 -> 0 bytes .../node_modules/bson/build/bson.target.mk | 126 - .../node_modules/bson/build/config.gypi | 
115 - .../mongodb/node_modules/bson/package.json | 56 - .../mongodb/node_modules/kerberos/LICENSE | 201 -- .../mongodb/node_modules/kerberos/README.md | 4 - .../mongodb/node_modules/kerberos/binding.gyp | 41 - .../Release/.deps/Release/kerberos.node.d | 1 - .../.deps/Release/obj.target/kerberos.node.d | 1 - .../kerberos/build/Release/kerberos.node | Bin 6608 -> 0 bytes .../kerberos/build/Release/linker.lock | 0 .../build/Release/obj.target/kerberos.node | Bin 6608 -> 0 bytes .../node_modules/kerberos/build/config.gypi | 115 - .../kerberos/build/kerberos.target.mk | 42 - .../mongodb/node_modules/kerberos/index.js | 6 - .../kerberos/lib/auth_processes/mongodb.js | 281 --- .../node_modules/kerberos/lib/base64.c | 120 - .../node_modules/kerberos/lib/base64.h | 18 - .../node_modules/kerberos/lib/kerberos.cc | 563 ----- .../node_modules/kerberos/lib/kerberos.h | 47 - .../node_modules/kerberos/lib/kerberos.js | 91 - .../kerberos/lib/kerberos_context.cc | 74 - .../kerberos/lib/kerberos_context.h | 48 - .../node_modules/kerberos/lib/kerberosgss.c | 666 ------ .../node_modules/kerberos/lib/kerberosgss.h | 70 - .../mongodb/node_modules/kerberos/lib/sspi.js | 15 - .../node_modules/kerberos/lib/win32/base64.c | 121 - .../node_modules/kerberos/lib/win32/base64.h | 18 - .../kerberos/lib/win32/kerberos.cc | 53 - .../kerberos/lib/win32/kerberos.h | 59 - .../kerberos/lib/win32/kerberos_sspi.c | 244 -- .../kerberos/lib/win32/kerberos_sspi.h | 106 - .../node_modules/kerberos/lib/win32/worker.cc | 7 - .../node_modules/kerberos/lib/win32/worker.h | 37 - .../lib/win32/wrappers/security_buffer.cc | 110 - .../lib/win32/wrappers/security_buffer.h | 46 - .../lib/win32/wrappers/security_buffer.js | 12 - .../wrappers/security_buffer_descriptor.cc | 177 -- .../wrappers/security_buffer_descriptor.h | 44 - .../wrappers/security_buffer_descriptor.js | 3 - .../lib/win32/wrappers/security_context.cc | 1211 ---------- .../lib/win32/wrappers/security_context.h | 85 - .../lib/win32/wrappers/security_context.js | 3 - .../win32/wrappers/security_credentials.cc | 468 ---- .../lib/win32/wrappers/security_credentials.h | 67 - .../win32/wrappers/security_credentials.js | 22 - .../node_modules/kerberos/lib/worker.cc | 7 - .../node_modules/kerberos/lib/worker.h | 39 - .../node_modules/kerberos/package.json | 39 - .../kerberos/test/kerberos_tests.js | 34 - .../kerberos/test/kerberos_win32_test.js | 19 - .../win32/security_buffer_descriptor_tests.js | 41 - .../test/win32/security_buffer_tests.js | 22 - .../test/win32/security_credentials_tests.js | 55 - .../mquery/node_modules/mongodb/package.json | 228 -- .../mongoose/node_modules/mquery/package.json | 44 - .../mongoose/node_modules/ms/.npmignore | 1 - .../mongoose/node_modules/ms/package.json | 19 - .../mongoose/node_modules/muri/package.json | 38 - .../node_modules/regexp-clone/package.json | 37 - .../mongoose/node_modules/sliced/package.json | 39 - node_modules/mongoose/package.json | 103 +- .../node_modules => }/mpath/.npmignore | 0 .../node_modules => }/mpath/.travis.yml | 0 .../node_modules => }/mpath/History.md | 0 .../{mongoose/node_modules => }/mpath/LICENSE | 0 .../node_modules => }/mpath/Makefile | 0 .../node_modules => }/mpath/README.md | 0 .../node_modules => }/mpath/index.js | 0 .../node_modules => }/mpath/lib/index.js | 0 node_modules/mpath/package.json | 56 + .../node_modules => }/mpath/test/index.js | 0 .../node_modules => }/mpromise/.idea/.name | 0 .../mpromise/.idea/codeStyleSettings.xml | 66 +- .../mpromise/.idea/encodings.xml | 10 +- 
.../inspectionProfiles/Project_Default.xml | 14 +- .../inspectionProfiles/profiles_settings.xml | 12 +- .../mpromise/.idea/jsLibraryMappings.xml | 20 +- .../Node_js_Dependencies_for_mpromise.xml | 24 +- .../node_modules => }/mpromise/.idea/misc.xml | 16 +- .../mpromise/.idea/modules.xml | 18 +- .../mpromise/.idea/mpromise.iml | 24 +- .../mpromise/.idea/other.xml | 14 +- .../mpromise/.idea/scopes/scope_settings.xml | 8 +- .../node_modules => }/mpromise/.idea/vcs.xml | 14 +- .../mpromise/.idea/workspace.xml | 1252 +++++----- .../node_modules => }/mpromise/.npmignore | 0 .../node_modules => }/mpromise/.travis.yml | 0 .../node_modules => }/mpromise/History.md | 0 .../node_modules => }/mpromise/LICENSE | 0 .../node_modules => }/mpromise/README.md | 0 .../node_modules => }/mpromise/index.js | 0 .../node_modules => }/mpromise/lib/promise.js | 0 node_modules/mpromise/package.json | 59 + .../mpromise/test/promise.domain.test.js | 0 .../mpromise/test/promise.test.js | 0 .../mpromise/test/promises.Aplus.js | 0 .../node_modules => }/mquery/.npmignore | 0 .../node_modules => }/mquery/.travis.yml | 0 .../node_modules => }/mquery/History.md | 0 .../node_modules => }/mquery/LICENSE | 0 .../node_modules => }/mquery/Makefile | 0 .../node_modules => }/mquery/README.md | 0 .../node_modules => }/mquery/index.js | 0 .../mquery/lib/collection/collection.js | 0 .../mquery/lib/collection/index.js | 0 .../mquery/lib/collection/node.js | 0 .../node_modules => }/mquery/lib/env.js | 0 .../node_modules => }/mquery/lib/mquery.js | 0 .../mquery/lib/permissions.js | 0 .../node_modules => }/mquery/lib/utils.js | 0 .../node_modules/bson/.travis.yml | 0 .../node_modules/bson/Makefile | 0 .../node_modules/bson/README.md | 0 .../node_modules/bson/binding.gyp | 0 .../node_modules/bson/browser_build/bson.js | 0 .../bson/browser_build/package.json | 0 .../node_modules/bson/build/Makefile | 98 +- .../Release/obj.target/bson/ext/bson.o.d.raw | 12 + .../node_modules/bson/build/binding.Makefile | 2 +- .../node_modules/bson/build/bson.target.mk | 188 ++ .../node_modules/bson/build/config.gypi | 203 ++ .../node_modules/bson/build/gyp-mac-tool | 615 +++++ .../node_modules/bson/build_browser.js | 0 .../node_modules/bson/ext/Makefile | 0 .../node_modules/bson/ext/bson.cc | 2090 ++++++++--------- .../node_modules/bson/ext/bson.h | 554 ++--- .../node_modules/bson/ext/index.js | 0 .../bson/ext/win32/ia32/bson.node | Bin .../node_modules/bson/ext/win32/x64/bson.node | Bin .../node_modules/bson/ext/wscript | 0 .../node_modules/bson/lib/bson/binary.js | 0 .../bson/lib/bson/binary_parser.js | 0 .../node_modules/bson/lib/bson/bson.js | 0 .../node_modules/bson/lib/bson/code.js | 0 .../node_modules/bson/lib/bson/db_ref.js | 0 .../node_modules/bson/lib/bson/double.js | 0 .../bson/lib/bson/float_parser.js | 0 .../node_modules/bson/lib/bson/index.js | 0 .../node_modules/bson/lib/bson/long.js | 0 .../node_modules/bson/lib/bson/max_key.js | 0 .../node_modules/bson/lib/bson/min_key.js | 0 .../node_modules/bson/lib/bson/objectid.js | 0 .../node_modules/bson/lib/bson/symbol.js | 0 .../node_modules/bson/lib/bson/timestamp.js | 0 .../mquery/node_modules/bson/package.json | 73 + .../node_modules/bson/tools/gleak.js | 0 .../bson/tools/jasmine-1.1.0/MIT.LICENSE | 0 .../bson/tools/jasmine-1.1.0/jasmine-html.js | 0 .../bson/tools/jasmine-1.1.0/jasmine.css | 0 .../bson/tools/jasmine-1.1.0/jasmine.js | 0 .../tools/jasmine-1.1.0/jasmine_favicon.png | Bin .../node_modules/debug}/.npmignore | 0 .../mquery/node_modules/debug/History.md | 0 
.../mquery/node_modules/debug/Makefile | 0 .../mquery/node_modules/debug/Readme.md | 0 .../node_modules/debug/debug.component.js | 0 .../mquery/node_modules/debug/debug.js | 0 .../mquery/node_modules/debug/example/app.js | 0 .../node_modules/debug/example/browser.html | 0 .../node_modules/debug/example/wildcards.js | 0 .../node_modules/debug/example/worker.js | 0 .../mquery/node_modules/debug/head.js | 0 .../mquery/node_modules/debug/index.js | 0 .../mquery/node_modules/debug/lib/debug.js | 0 .../mquery/node_modules/debug/package.json | 53 + .../mquery/node_modules/debug/tail.js | 0 .../mquery/node_modules/mongodb/.travis.yml | 0 .../node_modules/mongodb/CONTRIBUTING.md | 0 .../mquery/node_modules/mongodb/LICENSE | 0 .../mquery/node_modules/mongodb/Makefile | 0 .../mquery/node_modules/mongodb/Readme.md | 0 .../mquery/node_modules/mongodb/index.js | 0 .../node_modules/mongodb/lib/mongodb/admin.js | 0 .../mongodb/lib/mongodb/auth/mongodb_cr.js | 0 .../lib/mongodb/auth/mongodb_gssapi.js | 0 .../mongodb/lib/mongodb/auth/mongodb_plain.js | 0 .../mongodb/lib/mongodb/auth/mongodb_sspi.js | 0 .../mongodb/lib/mongodb/collection.js | 0 .../lib/mongodb/commands/base_command.js | 0 .../lib/mongodb/commands/db_command.js | 0 .../lib/mongodb/commands/delete_command.js | 0 .../lib/mongodb/commands/get_more_command.js | 0 .../lib/mongodb/commands/insert_command.js | 0 .../mongodb/commands/kill_cursor_command.js | 0 .../lib/mongodb/commands/query_command.js | 0 .../lib/mongodb/commands/update_command.js | 0 .../mongodb/lib/mongodb/connection/base.js | 0 .../lib/mongodb/connection/connection.js | 0 .../lib/mongodb/connection/connection_pool.js | 0 .../mongodb/connection/connection_utils.js | 0 .../mongodb/lib/mongodb/connection/mongos.js | 0 .../lib/mongodb/connection/read_preference.js | 0 .../lib/mongodb/connection/repl_set/ha.js | 0 .../mongodb/connection/repl_set/options.js | 0 .../mongodb/connection/repl_set/repl_set.js | 0 .../connection/repl_set/repl_set_state.js | 0 .../repl_set/strategies/ping_strategy.js | 0 .../strategies/statistics_strategy.js | 0 .../mongodb/lib/mongodb/connection/server.js | 0 .../lib/mongodb/connection/url_parser.js | 0 .../mongodb/lib/mongodb/cursor.js | 0 .../mongodb/lib/mongodb/cursorstream.js | 0 .../node_modules/mongodb/lib/mongodb/db.js | 0 .../mongodb/lib/mongodb/gridfs/chunk.js | 0 .../mongodb/lib/mongodb/gridfs/grid.js | 0 .../mongodb/lib/mongodb/gridfs/gridstore.js | 0 .../mongodb/lib/mongodb/gridfs/readstream.js | 0 .../node_modules/mongodb/lib/mongodb/index.js | 0 .../mongodb/lib/mongodb/mongo_client.js | 0 .../lib/mongodb/responses/mongo_reply.js | 0 .../node_modules/mongodb/lib/mongodb/utils.js | 0 .../mquery/node_modules/mongodb/package.json | 245 ++ .../mquery/node_modules/mongodb/t.js | 0 node_modules/mquery/package.json | 63 + .../mquery/test/collection/browser.js | 0 .../mquery/test/collection/mongo.js | 0 .../mquery/test/collection/node.js | 0 .../node_modules => }/mquery/test/env.js | 0 .../node_modules => }/mquery/test/index.js | 0 .../node_modules/wordwrap => ms}/.npmignore | 0 .../{mongoose/node_modules => }/ms/Makefile | 0 .../{mongoose/node_modules => }/ms/README.md | 0 .../{mongoose/node_modules => }/ms/ms.js | 0 node_modules/ms/package.json | 39 + .../node_modules => }/ms/test/index.html | 0 .../ms/test/support/jquery.js | 0 .../node_modules => }/ms/test/test.js | 0 .../node_modules => }/multiparty/.jshintrc | 0 .../stream-counter => multiparty}/.npmignore | 0 .../node_modules => }/multiparty/.travis.yml | 0 .../node_modules => 
}/multiparty/CHANGELOG.md | 0 .../node_modules => }/multiparty/LICENSE | 0 .../node_modules => }/multiparty/README.md | 0 .../multiparty/examples/azureblobstorage.js | 0 .../multiparty/examples/s3.js | 0 .../multiparty/examples/upload.js | 0 .../node_modules => }/multiparty/index.js | 0 node_modules/multiparty/package.json | 64 + .../multiparty/test/bench-multipart-parser.js | 0 .../test/fixture/file/beta-sticker-1.png | Bin .../multiparty/test/fixture/file/blank.gif | Bin .../test/fixture/file/funkyfilename.txt | 0 .../test/fixture/file/menu_separator.png | Bin .../multiparty/test/fixture/file/pf1y5.png | Bin .../multiparty/test/fixture/file/plain.txt | 0 .../http/encoding/beta-sticker-1.png.http | 24 +- .../http/encoding/binaryfile.tar.gz.http | 24 +- .../test/fixture/http/encoding/blank.gif.http | 24 +- .../http/encoding/menu_seperator.png.http | 24 +- .../test/fixture/http/encoding/pf1y5.png.http | Bin .../test/fixture/http/encoding/plain.txt.http | 24 +- .../http/no-filename/filename-name.http | 22 +- .../fixture/http/no-filename/generic.http | 22 +- .../test/fixture/http/preamble/crlf.http | 24 +- .../test/fixture/http/preamble/preamble.http | 24 +- .../http/special-chars-in-filename/info.md | 0 .../osx-chrome-13.http | 50 +- .../osx-firefox-3.6.http | 46 +- .../osx-safari-5.http | 44 +- .../xp-chrome-12.http | 48 +- .../special-chars-in-filename/xp-ie-7.http | 44 +- .../special-chars-in-filename/xp-ie-8.http | 44 +- .../xp-safari-5.http | 44 +- .../http/workarounds/missing-hyphens1.http | 22 +- .../http/workarounds/missing-hyphens2.http | 22 +- .../multiparty/test/fixture/js/encoding.js | 0 .../multiparty/test/fixture/js/no-filename.js | 0 .../multiparty/test/fixture/js/preamble.js | 0 .../fixture/js/special-chars-in-filename.js | 0 .../multiparty/test/fixture/js/workarounds.js | 0 .../test/fixture/multi_video.upload | Bin .../multiparty/test/fixture/multipart.js | 0 .../multiparty/test/record.js | 0 .../standalone/test-connection-aborted.js | 0 .../test-content-transfer-encoding.js | 0 .../test/standalone/test-invalid.js | 0 .../test/standalone/test-issue-15.js | 0 .../test/standalone/test-issue-19.js | 0 .../test/standalone/test-issue-21.js | 0 .../test/standalone/test-issue-4.js | 0 .../test/standalone/test-issue-46.js | 0 .../test/standalone/test-issue-5.js | 0 .../node_modules => }/multiparty/test/test.js | 0 .../node_modules => }/muri/.npmignore | 0 .../node_modules => }/muri/.travis.yml | 0 .../node_modules => }/muri/History.md | 0 .../{mongoose/node_modules => }/muri/LICENSE | 0 .../{mongoose/node_modules => }/muri/Makefile | 0 .../node_modules => }/muri/README.md | 0 .../{mongoose/node_modules => }/muri/index.js | 0 .../node_modules => }/muri/lib/index.js | 0 node_modules/muri/package.json | 55 + .../node_modules => }/muri/test/index.js | 0 .../node_modules => }/negotiator/LICENSE | 0 .../negotiator/examples/accept.js | 0 .../negotiator/examples/charset.js | 0 .../negotiator/examples/encoding.js | 0 .../negotiator/examples/language.js | 0 .../negotiator/lib/charset.js | 0 .../negotiator/lib/encoding.js | 0 .../negotiator/lib/language.js | 0 .../negotiator/lib/mediaType.js | 0 .../negotiator/lib/negotiator.js | 0 node_modules/negotiator/package.json | 71 + .../node_modules => }/negotiator/readme.md | 0 .../negotiator/test/charset.js | 0 .../negotiator/test/encoding.js | 0 .../negotiator/test/language.js | 0 .../negotiator/test/mediaType.js | 0 .../node_modules => }/optimist/.travis.yml | 0 .../node_modules => }/optimist/LICENSE | 0 .../optimist/example/bool.js | 0 
.../optimist/example/boolean_double.js | 0 .../optimist/example/boolean_single.js | 0 .../optimist/example/default_hash.js | 0 .../optimist/example/default_singles.js | 0 .../optimist/example/divide.js | 0 .../optimist/example/line_count.js | 0 .../optimist/example/line_count_options.js | 0 .../optimist/example/line_count_wrap.js | 0 .../optimist/example/nonopt.js | 0 .../optimist/example/reflect.js | 0 .../optimist/example/short.js | 0 .../optimist/example/string.js | 0 .../optimist/example/usage-options.js | 0 .../node_modules => }/optimist/example/xup.js | 0 .../node_modules => }/optimist/index.js | 0 node_modules/optimist/package.json | 68 + .../optimist/readme.markdown | 0 .../node_modules => }/optimist/test/_.js | 0 .../node_modules => }/optimist/test/_/argv.js | 0 .../node_modules => }/optimist/test/_/bin.js | 0 .../node_modules => }/optimist/test/parse.js | 0 .../node_modules => }/optimist/test/usage.js | 0 .../node_modules/send => pause}/.npmignore | 0 .../node_modules => }/pause/History.md | 0 .../cookie-signature => pause}/Makefile | 0 .../connect/node_modules => }/pause/Readme.md | 0 .../connect/node_modules => }/pause/index.js | 0 node_modules/pause/package.json | 41 + .../connect/node_modules => }/qs/.gitmodules | 0 .../connect/node_modules => }/qs/.npmignore | 0 .../connect/node_modules => }/qs/Readme.md | 0 .../connect/node_modules => }/qs/index.js | 0 node_modules/qs/package.json | 59 + .../node_modules => }/range-parser/.npmignore | 0 .../node_modules => }/range-parser/History.md | 0 .../node_modules => }/range-parser/Makefile | 0 .../node_modules => }/range-parser/Readme.md | 0 .../node_modules => }/range-parser/index.js | 0 node_modules/range-parser/package.json | 45 + .../node_modules => }/raw-body/.npmignore | 0 .../node_modules => }/raw-body/.travis.yml | 0 .../node_modules => }/raw-body/Makefile | 0 .../node_modules => }/raw-body/README.md | 0 .../node_modules => }/raw-body/index.js | 0 node_modules/raw-body/package.json | 62 + .../readable-stream/.npmignore | 0 .../node_modules => }/readable-stream/LICENSE | 0 node_modules/readable-stream/README.md | 15 + .../readable-stream/duplex.js | 0 .../readable-stream/float.patch | 0 .../readable-stream/lib/_stream_duplex.js | 24 +- .../lib/_stream_passthrough.js | 7 +- .../readable-stream/lib/_stream_readable.js | 85 +- .../readable-stream/lib/_stream_transform.js | 13 +- .../readable-stream/lib/_stream_writable.js | 35 +- node_modules/readable-stream/package.json | 66 + .../readable-stream/passthrough.js | 0 .../readable-stream/readable.js | 3 + .../readable-stream/transform.js | 0 .../readable-stream/writable.js | 0 .../node_modules => }/regexp-clone/.npmignore | 0 .../regexp-clone/.travis.yml | 0 .../node_modules => }/regexp-clone/History.md | 0 .../node_modules => }/regexp-clone/LICENSE | 0 .../node_modules => }/regexp-clone/Makefile | 0 .../node_modules => }/regexp-clone/README.md | 0 .../node_modules => }/regexp-clone/index.js | 0 node_modules/regexp-clone/package.json | 55 + .../regexp-clone/test/index.js | 0 .../node_modules => }/semver/.npmignore | 0 .../node_modules/sigmund => semver}/LICENSE | 0 .../node_modules => }/semver/Makefile | 0 .../node_modules => }/semver/README.md | 90 +- .../node_modules => }/semver/bin/semver | 5 +- .../node_modules => }/semver/foot.js | 0 .../node_modules => }/semver/head.js | 0 node_modules/semver/package.json | 53 + .../semver/semver.browser.js | 47 +- .../node_modules => }/semver/semver.js | 47 +- node_modules/semver/semver.min.js | 1 + .../node_modules => }/semver/test/amd.js | 
0 .../node_modules => }/semver/test/gtr.js | 0 .../node_modules => }/semver/test/index.js | 40 +- .../node_modules => }/semver/test/ltr.js | 3 +- .../semver/test/no-module.js | 0 .../node_modules/debug => send}/.npmignore | 0 .../node_modules => }/send/History.md | 0 .../{express/node_modules => }/send/Makefile | 0 .../{express/node_modules => }/send/Readme.md | 0 .../{express/node_modules => }/send/index.js | 0 .../node_modules => }/send/lib/send.js | 0 .../node_modules => }/send/lib/utils.js | 0 node_modules/send/package.json | 64 + node_modules/sigmund/LICENSE | 15 + .../node_modules => }/sigmund/README.md | 6 +- .../node_modules => }/sigmund/bench.js | 0 node_modules/sigmund/package.json | 63 + .../node_modules => }/sigmund/sigmund.js | 0 .../node_modules => }/sigmund/test/basic.js | 0 .../node_modules => }/sliced/.npmignore | 0 .../node_modules => }/sliced/.travis.yml | 0 .../node_modules => }/sliced/History.md | 0 .../node_modules => }/sliced/LICENSE | 0 .../node_modules => }/sliced/Makefile | 0 .../node_modules => }/sliced/README.md | 0 .../node_modules => }/sliced/bench.js | 0 .../node_modules => }/sliced/component.json | 0 .../node_modules => }/sliced/index.js | 0 .../node_modules => }/sliced/lib/sliced.js | 0 node_modules/sliced/package.json | 57 + .../node_modules => }/sliced/test/index.js | 0 .../node_modules => }/source-map/.npmignore | 0 .../node_modules => }/source-map/.travis.yml | 0 node_modules/source-map/CHANGELOG.md | 194 ++ .../node_modules => }/source-map/LICENSE | 0 .../source-map/Makefile.dryice.js | 0 .../node_modules => }/source-map/README.md | 61 +- .../source-map/build/assert-shim.js | 0 .../source-map/build/mini-require.js | 0 .../source-map/build/prefix-source-map.jsm | 0 .../source-map/build/prefix-utils.jsm | 0 .../source-map/build/suffix-browser.js | 0 .../source-map/build/suffix-source-map.jsm | 0 .../source-map/build/suffix-utils.jsm | 0 .../source-map/build/test-prefix.js | 0 .../source-map/build/test-suffix.js | 0 .../source-map/lib/source-map.js | 0 .../source-map/lib/source-map/array-set.js | 0 .../source-map/lib/source-map/base64-vlq.js | 14 +- .../source-map/lib/source-map/base64.js | 0 .../lib/source-map/binary-search.js | 29 +- .../source-map/lib/source-map/mapping-list.js | 86 + .../lib/source-map/source-map-consumer.js | 184 +- .../lib/source-map/source-map-generator.js | 124 +- .../source-map/lib/source-map/source-node.js | 147 +- .../source-map/lib/source-map/util.js | 170 +- node_modules/source-map/package.json | 183 ++ .../source-map/test/run-tests.js | 19 +- .../source-map/test/source-map/test-api.js | 0 .../test/source-map/test-array-set.js | 0 .../test/source-map/test-base64-vlq.js | 5 +- .../source-map/test/source-map/test-base64.js | 0 .../test/source-map/test-binary-search.js | 8 +- .../test/source-map/test-dog-fooding.js | 22 +- .../source-map/test-source-map-consumer.js | 267 ++- .../source-map/test-source-map-generator.js | 262 +++ .../test/source-map/test-source-node.js | 309 ++- .../source-map/test/source-map/test-util.js | 216 ++ .../source-map/test/source-map/util.js | 31 + .../lru-cache => stream-counter}/.npmignore | 0 .../stream-counter/README.md | 0 .../node_modules => }/stream-counter/index.js | 0 node_modules/stream-counter/package.json | 53 + .../stream-counter/test/test.js | 0 .../stream-counter/test/test.txt | 0 node_modules/string_decoder/.npmignore | 2 + node_modules/string_decoder/LICENSE | 20 + node_modules/string_decoder/README.md | 7 + node_modules/string_decoder/index.js | 221 ++ 
node_modules/string_decoder/package.json | 53 + .../node_modules => }/uglify-js/.npmignore | 0 .../node_modules => }/uglify-js/.travis.yml | 0 .../node_modules => }/uglify-js/LICENSE | 0 .../node_modules => }/uglify-js/README.md | 0 .../node_modules => }/uglify-js/bin/uglifyjs | 0 .../node_modules => }/uglify-js/lib/ast.js | 0 .../uglify-js/lib/compress.js | 0 .../uglify-js/lib/mozilla-ast.js | 0 .../node_modules => }/uglify-js/lib/output.js | 0 .../node_modules => }/uglify-js/lib/parse.js | 0 .../node_modules => }/uglify-js/lib/scope.js | 0 .../uglify-js/lib/sourcemap.js | 0 .../uglify-js/lib/transform.js | 0 .../node_modules => }/uglify-js/lib/utils.js | 0 node_modules/uglify-js/package.json | 60 + .../uglify-js/test/compress/arrays.js | 0 .../uglify-js/test/compress/blocks.js | 0 .../uglify-js/test/compress/conditionals.js | 0 .../uglify-js/test/compress/dead-code.js | 0 .../uglify-js/test/compress/debugger.js | 0 .../uglify-js/test/compress/drop-unused.js | 0 .../uglify-js/test/compress/issue-105.js | 0 .../uglify-js/test/compress/issue-12.js | 0 .../uglify-js/test/compress/issue-143.js | 0 .../uglify-js/test/compress/issue-22.js | 0 .../uglify-js/test/compress/issue-44.js | 0 .../uglify-js/test/compress/issue-59.js | 0 .../uglify-js/test/compress/labels.js | 0 .../uglify-js/test/compress/loops.js | 0 .../uglify-js/test/compress/properties.js | 0 .../uglify-js/test/compress/sequences.js | 0 .../uglify-js/test/compress/switch.js | 0 .../uglify-js/test/compress/typeof.js | 0 .../uglify-js/test/run-tests.js | 0 .../node_modules => }/uglify-js/tools/node.js | 0 .../connect/node_modules => }/uid2/LICENSE | 0 .../connect/node_modules => }/uid2/index.js | 0 node_modules/uid2/package.json | 34 + node_modules/wordwrap/LICENSE | 18 + .../wordwrap/README.markdown | 0 .../wordwrap/example/center.js | 0 .../wordwrap/example/meat.js | 0 .../node_modules => }/wordwrap/index.js | 0 node_modules/wordwrap/package.json | 66 + .../node_modules => }/wordwrap/test/break.js | 0 .../wordwrap/test/idleness.txt | 0 .../node_modules => }/wordwrap/test/wrap.js | 0 package-lock.json | 409 ++++ package.json | 4 +- public/css/add-activity.css | 13 +- views/add.handlebars | 2 + 1060 files changed, 15926 insertions(+), 14566 deletions(-) rename node_modules/{express3-handlebars/node_modules => }/.bin/handlebars (100%) rename node_modules/{express3-handlebars/node_modules => }/.bin/semver (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/.bin/uglifyjs (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules => }/amdefine/LICENSE (96%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules => }/amdefine/README.md (99%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules => }/amdefine/amdefine.js (97%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules => }/amdefine/intercept.js (100%) create mode 100644 node_modules/amdefine/package.json rename node_modules/{express3-handlebars/node_modules => }/async/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules => }/async/README.md (100%) rename node_modules/{express3-handlebars/node_modules => }/async/component.json (100%) rename node_modules/{express3-handlebars/node_modules => }/async/lib/async.js (100%) mode change 100644 => 100755 
create mode 100644 node_modules/async/package.json rename node_modules/{express/node_modules/connect/node_modules => }/batch/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules => }/batch/History.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/batch/Makefile (100%) rename node_modules/{express/node_modules/connect/node_modules => }/batch/Readme.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/batch/component.json (100%) rename node_modules/{express/node_modules/connect/node_modules => }/batch/index.js (100%) create mode 100644 node_modules/batch/package.json rename node_modules/{mongodb/node_modules => }/bson/.travis.yml (100%) rename node_modules/{mongodb/node_modules => }/bson/Makefile (100%) rename node_modules/{mongodb/node_modules => }/bson/README.md (100%) rename node_modules/{mongodb/node_modules => }/bson/binding.gyp (100%) rename node_modules/{mongodb/node_modules => }/bson/browser_build/bson.js (100%) rename node_modules/{mongodb/node_modules => }/bson/browser_build/package.json (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb/node_modules => }/bson/build/Makefile (76%) create mode 100644 node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw rename node_modules/{mongodb/node_modules => }/bson/build/binding.Makefile (69%) create mode 100644 node_modules/bson/build/bson.target.mk create mode 100644 node_modules/bson/build/config.gypi create mode 100755 node_modules/bson/build/gyp-mac-tool rename node_modules/{mongodb/node_modules => }/bson/build_browser.js (100%) rename node_modules/{mongodb/node_modules => }/bson/ext/Makefile (100%) rename node_modules/{mongodb/node_modules => }/bson/ext/bson.cc (97%) rename node_modules/{mongodb/node_modules => }/bson/ext/bson.h (97%) rename node_modules/{mongodb/node_modules => }/bson/ext/index.js (100%) rename node_modules/{mongodb/node_modules => }/bson/ext/nan.h (100%) rename node_modules/{mongodb/node_modules => }/bson/ext/win32/ia32/bson.node (100%) rename node_modules/{mongodb/node_modules => }/bson/ext/win32/x64/bson.node (100%) rename node_modules/{mongodb/node_modules => }/bson/ext/wscript (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/binary.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/binary_parser.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/bson.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/code.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/db_ref.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/double.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/float_parser.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/index.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/long.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/max_key.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/min_key.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/objectid.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/symbol.js (100%) rename node_modules/{mongodb/node_modules => }/bson/lib/bson/timestamp.js (100%) create mode 100644 node_modules/bson/package.json rename node_modules/{mongodb/node_modules => }/bson/tools/gleak.js (100%) rename node_modules/{mongodb/node_modules => }/bson/tools/jasmine-1.1.0/MIT.LICENSE (100%) rename 
node_modules/{mongodb/node_modules => }/bson/tools/jasmine-1.1.0/jasmine-html.js (100%) rename node_modules/{mongodb/node_modules => }/bson/tools/jasmine-1.1.0/jasmine.css (100%) rename node_modules/{mongodb/node_modules => }/bson/tools/jasmine-1.1.0/jasmine.js (100%) rename node_modules/{mongodb/node_modules => }/bson/tools/jasmine-1.1.0/jasmine_favicon.png (100%) rename node_modules/{express/node_modules => }/buffer-crc32/.npmignore (100%) rename node_modules/{express/node_modules => }/buffer-crc32/.travis.yml (100%) rename node_modules/{express/node_modules => }/buffer-crc32/README.md (100%) rename node_modules/{express/node_modules => }/buffer-crc32/index.js (100%) create mode 100644 node_modules/buffer-crc32/package.json rename node_modules/{express/node_modules => }/buffer-crc32/tests/crc.test.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/bytes/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules => }/bytes/History.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/bytes/Makefile (100%) rename node_modules/{express/node_modules/connect/node_modules => }/bytes/Readme.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/bytes/component.json (100%) rename node_modules/{express/node_modules/connect/node_modules => }/bytes/index.js (100%) create mode 100644 node_modules/bytes/package.json rename node_modules/{express/node_modules => }/commander/History.md (100%) rename node_modules/{express/node_modules => }/commander/Readme.md (100%) rename node_modules/{express/node_modules => }/commander/index.js (100%) create mode 100644 node_modules/commander/package.json rename node_modules/{express/node_modules => }/connect/.npmignore (100%) rename node_modules/{express/node_modules => }/connect/.travis.yml (100%) rename node_modules/{express/node_modules => }/connect/LICENSE (100%) rename node_modules/{express/node_modules => }/connect/Readme.md (100%) rename node_modules/{express/node_modules => }/connect/index.js (100%) rename node_modules/{express/node_modules => }/connect/lib/cache.js (100%) rename node_modules/{express/node_modules => }/connect/lib/connect.js (100%) rename node_modules/{express/node_modules => }/connect/lib/index.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/basicAuth.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/bodyParser.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/compress.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/cookieParser.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/cookieSession.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/csrf.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/directory.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/errorHandler.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/favicon.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/json.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/limit.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/logger.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/methodOverride.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/multipart.js (100%) rename 
node_modules/{express/node_modules => }/connect/lib/middleware/query.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/responseTime.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/session.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/session/cookie.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/session/memory.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/session/session.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/session/store.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/static.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/staticCache.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/timeout.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/urlencoded.js (100%) rename node_modules/{express/node_modules => }/connect/lib/middleware/vhost.js (100%) rename node_modules/{express/node_modules => }/connect/lib/patch.js (100%) rename node_modules/{express/node_modules => }/connect/lib/proto.js (100%) rename node_modules/{express/node_modules => }/connect/lib/public/directory.html (100%) rename node_modules/{express/node_modules => }/connect/lib/public/error.html (100%) rename node_modules/{express/node_modules => }/connect/lib/public/favicon.ico (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/folder.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_add.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_attach.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_code.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_copy.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_delete.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_edit.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_error.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_excel.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_find.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_gear.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_go.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_green.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_key.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_lightning.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_link.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_paintbrush.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_paste.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_red.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_refresh.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_save.png (100%) rename 
node_modules/{express/node_modules => }/connect/lib/public/icons/page_white.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_acrobat.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_actionscript.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_add.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_c.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_camera.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_cd.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_code.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_code_red.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_coldfusion.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_compressed.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_copy.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_cplusplus.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_csharp.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_cup.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_database.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_delete.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_dvd.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_edit.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_error.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_excel.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_find.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_flash.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_freehand.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_gear.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_get.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_go.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_h.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_horizontal.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_key.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_lightning.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_link.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_magnify.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_medal.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_office.png (100%) rename node_modules/{express/node_modules => 
}/connect/lib/public/icons/page_white_paint.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_paintbrush.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_paste.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_php.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_picture.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_powerpoint.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_put.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_ruby.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_stack.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_star.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_swoosh.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_text.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_text_width.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_tux.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_vector.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_visualstudio.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_width.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_word.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_world.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_wrench.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_white_zip.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_word.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/icons/page_world.png (100%) rename node_modules/{express/node_modules => }/connect/lib/public/style.css (100%) rename node_modules/{express/node_modules => }/connect/lib/utils.js (100%) create mode 100644 node_modules/connect/package.json rename node_modules/{express/node_modules/connect/node_modules/pause => cookie-signature}/.npmignore (100%) rename node_modules/{express/node_modules => }/cookie-signature/History.md (100%) rename node_modules/{express/node_modules/connect/node_modules/pause => cookie-signature}/Makefile (100%) rename node_modules/{express/node_modules => }/cookie-signature/Readme.md (100%) rename node_modules/{express/node_modules => }/cookie-signature/index.js (100%) create mode 100644 node_modules/cookie-signature/package.json rename node_modules/{express/node_modules => }/cookie/.npmignore (100%) rename node_modules/{express/node_modules => }/cookie/.travis.yml (100%) rename node_modules/{express/node_modules => }/cookie/LICENSE (100%) rename node_modules/{express/node_modules => }/cookie/README.md (100%) rename node_modules/{express/node_modules => }/cookie/index.js (100%) create mode 100644 node_modules/cookie/package.json rename node_modules/{express/node_modules => }/cookie/test/mocha.opts (100%) rename node_modules/{express/node_modules => }/cookie/test/parse.js (100%) rename 
node_modules/{express/node_modules => }/cookie/test/serialize.js (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog => core-util-is}/LICENSE (93%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules => }/core-util-is/README.md (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules => }/core-util-is/float.patch (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is => core-util-is/lib}/util.js (88%) create mode 100644 node_modules/core-util-is/package.json create mode 100644 node_modules/core-util-is/test.js rename node_modules/{express/node_modules => }/debug/Readme.md (91%) rename node_modules/{express/node_modules => }/debug/debug.js (100%) rename node_modules/{express/node_modules => }/debug/lib/debug.js (85%) create mode 100644 node_modules/debug/package.json mode change 100644 => 100755 node_modules/express/benchmarks/run mode change 100644 => 100755 node_modules/express/bin/express delete mode 100644 node_modules/express/node_modules/buffer-crc32/package.json delete mode 100644 node_modules/express/node_modules/commander/node_modules/keypress/package.json delete mode 100644 node_modules/express/node_modules/commander/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/batch/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/bytes/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/README.md delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/lib/util.js delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/README.md delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/debuglog.js delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/multiparty/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/negotiator/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/pause/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/qs/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/raw-body/package.json delete mode 100644 node_modules/express/node_modules/connect/node_modules/uid2/package.json delete mode 100644 node_modules/express/node_modules/connect/package.json delete mode 100644 node_modules/express/node_modules/cookie-signature/package.json delete mode 100644 
node_modules/express/node_modules/cookie/package.json delete mode 100644 node_modules/express/node_modules/debug/index.js delete mode 100644 node_modules/express/node_modules/debug/package.json delete mode 100644 node_modules/express/node_modules/fresh/package.json delete mode 100644 node_modules/express/node_modules/merge-descriptors/package.json delete mode 100644 node_modules/express/node_modules/methods/package.json delete mode 100644 node_modules/express/node_modules/mkdirp/package.json delete mode 100644 node_modules/express/node_modules/range-parser/package.json delete mode 100644 node_modules/express/node_modules/send/node_modules/mime/package.json delete mode 100644 node_modules/express/node_modules/send/package.json create mode 100644 node_modules/express3-handlebars/.npmignore 2 create mode 100644 node_modules/express3-handlebars/HISTORY 2.md create mode 100644 node_modules/express3-handlebars/LICENSE 2 create mode 100644 node_modules/express3-handlebars/README 2.md create mode 100644 node_modules/express3-handlebars/examples/advanced/app 2.js create mode 100644 node_modules/express3-handlebars/index 2.js create mode 100644 node_modules/express3-handlebars/lib/express-handlebars 2.js delete mode 100644 node_modules/express3-handlebars/node_modules/async/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits.js delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits_browser.js delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/test.js delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/foreach.js delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/glob/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/CHANGELOG.md delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/handlebars/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/semver/LICENSE delete mode 100644 node_modules/express3-handlebars/node_modules/semver/package.json delete mode 100644 node_modules/express3-handlebars/node_modules/semver/semver.min.js create 
mode 100644 node_modules/express3-handlebars/package 2.json rename node_modules/{express/node_modules => }/fresh/.npmignore (100%) rename node_modules/{express/node_modules => }/fresh/History.md (100%) rename node_modules/{express/node_modules => }/fresh/Makefile (100%) rename node_modules/{express/node_modules => }/fresh/Readme.md (100%) rename node_modules/{express/node_modules => }/fresh/index.js (100%) create mode 100644 node_modules/fresh/package.json rename node_modules/{express3-handlebars/node_modules => }/glob/.npmignore (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/.travis.yml (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/README.md (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/examples/g.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/examples/usr-local.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/glob.js (89%) create mode 100644 node_modules/glob/package.json rename node_modules/{express3-handlebars/node_modules => }/glob/test/00-setup.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/bash-comparison.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/bash-results.json (99%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/cwd-test.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/globstar-match.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/mark.js (73%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/new-glob-optional-options.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/nocase-nomagic.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/pause-resume.js (100%) create mode 100644 node_modules/glob/test/readme-issue.js rename node_modules/{express3-handlebars/node_modules => }/glob/test/root-nomount.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/root.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/stat.js (100%) rename node_modules/{express3-handlebars/node_modules => }/glob/test/zz-cleanup.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/.npmignore (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/README.markdown (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/bin/handlebars (100%) mode change 100644 => 100755 rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars.runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/base.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/ast.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/base.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/compiler.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/javascript-compiler.js (100%) rename 
node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/parser.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/printer.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/compiler/visitor.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/exception.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/safe-string.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/amd/handlebars/utils.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars.runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/base.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/ast.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/base.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/compiler.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/javascript-compiler.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/parser.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/printer.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/compiler/visitor.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/exception.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/safe-string.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/cjs/handlebars/utils.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.amd.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.amd.min.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.min.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.runtime.amd.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.runtime.amd.min.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/dist/handlebars.runtime.min.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars.runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/base.js (100%) rename node_modules/{express3-handlebars/node_modules => 
}/handlebars/lib/handlebars/compiler/ast.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/compiler/base.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/compiler/compiler.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/compiler/javascript-compiler.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/compiler/parser.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/compiler/printer.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/compiler/visitor.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/exception.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/runtime.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/safe-string.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/handlebars/utils.js (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/lib/index.js (100%) create mode 100644 node_modules/handlebars/package.json rename node_modules/{express3-handlebars/node_modules => }/handlebars/release-notes.md (100%) rename node_modules/{express3-handlebars/node_modules => }/handlebars/runtime.js (100%) rename node_modules/{mongoose/node_modules => }/hooks/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/hooks/Makefile (100%) rename node_modules/{mongoose/node_modules => }/hooks/README.md (100%) rename node_modules/{mongoose/node_modules => }/hooks/hooks.alt.js (100%) rename node_modules/{mongoose/node_modules => }/hooks/hooks.js (100%) create mode 100644 node_modules/hooks/package.json rename node_modules/{mongoose/node_modules => }/hooks/test.js (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/inherits/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/inherits/README.md (100%) create mode 100644 node_modules/inherits/inherits.js create mode 100644 node_modules/inherits/inherits_browser.js create mode 100644 node_modules/inherits/package.json create mode 100644 node_modules/isarray/README.md create mode 100644 node_modules/isarray/build/build.js create mode 100644 node_modules/isarray/component.json create mode 100644 node_modules/isarray/index.js create mode 100644 node_modules/isarray/package.json rename node_modules/{mongodb/node_modules => }/kerberos/LICENSE (100%) rename node_modules/{mongodb/node_modules => }/kerberos/README.md (100%) rename node_modules/{mongodb/node_modules => }/kerberos/binding.gyp (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb/node_modules => }/kerberos/build/Makefile (76%) create mode 100644 node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos/lib/kerberos.o.d.raw rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb/node_modules => }/kerberos/build/binding.Makefile (70%) create mode 100644 node_modules/kerberos/build/config.gypi create mode 100755 node_modules/kerberos/build/gyp-mac-tool create mode 100644 node_modules/kerberos/build/kerberos.target.mk rename node_modules/{mongodb/node_modules => }/kerberos/index.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/auth_processes/mongodb.js (100%) rename node_modules/{mongodb/node_modules 
=> }/kerberos/lib/base64.c (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/base64.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberos.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberos.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberos.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberos_context.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberos_context.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberosgss.c (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/kerberosgss.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/sspi.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/base64.c (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/base64.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/kerberos.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/kerberos.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/kerberos_sspi.c (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/kerberos_sspi.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/worker.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/worker.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_buffer.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_buffer.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_buffer.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_buffer_descriptor.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_buffer_descriptor.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_context.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_context.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_context.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_credentials.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_credentials.h (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/win32/wrappers/security_credentials.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/worker.cc (100%) rename node_modules/{mongodb/node_modules => }/kerberos/lib/worker.h (100%) create mode 100644 node_modules/kerberos/package.json rename node_modules/{mongodb/node_modules => }/kerberos/test/kerberos_tests.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/test/kerberos_win32_test.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/test/win32/security_buffer_descriptor_tests.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/test/win32/security_buffer_tests.js (100%) rename node_modules/{mongodb/node_modules => }/kerberos/test/win32/security_credentials_tests.js (100%) rename node_modules/{express/node_modules/commander/node_modules => }/keypress/README.md (100%) rename node_modules/{express/node_modules/commander/node_modules => 
}/keypress/index.js (100%) create mode 100644 node_modules/keypress/package.json rename node_modules/{express/node_modules/commander/node_modules => }/keypress/test.js (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty => lru-cache}/.npmignore (100%) create mode 100644 node_modules/lru-cache/.travis.yml rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/lru-cache/CONTRIBUTORS (100%) create mode 100644 node_modules/lru-cache/LICENSE rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/lru-cache/README.md (70%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/lru-cache/lib/lru-cache.js (70%) create mode 100644 node_modules/lru-cache/package.json rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/lru-cache/test/basic.js (86%) create mode 100644 node_modules/lru-cache/test/foreach.js rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/lru-cache/test/memory-leak.js (99%) create mode 100644 node_modules/lru-cache/test/serialize.js rename node_modules/{express/node_modules => }/merge-descriptors/.npmignore (100%) rename node_modules/{express/node_modules => }/merge-descriptors/README.md (100%) rename node_modules/{express/node_modules => }/merge-descriptors/component.json (100%) rename node_modules/{express/node_modules => }/merge-descriptors/index.js (100%) create mode 100644 node_modules/merge-descriptors/package.json rename node_modules/{express/node_modules => }/methods/History.md (100%) rename node_modules/{express/node_modules => }/methods/Readme.md (100%) rename node_modules/{express/node_modules => }/methods/index.js (100%) create mode 100644 node_modules/methods/package.json rename node_modules/{express/node_modules/send/node_modules => }/mime/LICENSE (100%) rename node_modules/{express/node_modules/send/node_modules => }/mime/README.md (100%) rename node_modules/{express/node_modules/send/node_modules => }/mime/mime.js (100%) create mode 100644 node_modules/mime/package.json rename node_modules/{express/node_modules/send/node_modules => }/mime/test.js (100%) rename node_modules/{express/node_modules/send/node_modules => }/mime/types/mime.types (100%) rename node_modules/{express/node_modules/send/node_modules => }/mime/types/node.types (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/.npmignore (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/README.md (98%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/minimatch.js (98%) create mode 100644 node_modules/minimatch/package.json rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/test/basic.js (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/test/brace-expand.js (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/test/caching.js (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/test/defaults.js (99%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules => }/minimatch/test/extglob-ending-with-state-char.js (100%) rename node_modules/{express/node_modules => 
}/mkdirp/.npmignore (100%) rename node_modules/{express/node_modules => }/mkdirp/.travis.yml (100%) rename node_modules/{express/node_modules => }/mkdirp/LICENSE (100%) rename node_modules/{express/node_modules => }/mkdirp/examples/pow.js (100%) rename node_modules/{express/node_modules => }/mkdirp/index.js (100%) create mode 100644 node_modules/mkdirp/package.json rename node_modules/{express/node_modules => }/mkdirp/readme.markdown (100%) rename node_modules/{express/node_modules => }/mkdirp/test/chmod.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/clobber.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/mkdirp.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/perm.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/perm_sync.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/race.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/rel.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/return.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/return_sync.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/root.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/sync.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/umask.js (100%) rename node_modules/{express/node_modules => }/mkdirp/test/umask_sync.js (100%) mode change 100644 => 100755 node_modules/mongodb/index.js delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/bson.node delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/linker.lock delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson.node delete mode 100644 node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson/ext/bson.o delete mode 100644 node_modules/mongodb/node_modules/bson/build/bson.target.mk delete mode 100644 node_modules/mongodb/node_modules/bson/build/config.gypi delete mode 100644 node_modules/mongodb/node_modules/bson/package.json delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/Makefile delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/Release/kerberos.node delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/Release/linker.lock delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/Release/obj.target/kerberos.node delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/binding.Makefile delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/config.gypi delete mode 100644 node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk delete mode 100644 node_modules/mongodb/node_modules/kerberos/package.json mode change 100644 => 100755 node_modules/mongodb/package.json mode change 100644 => 100755 node_modules/mongoose/contRun.sh delete mode 100644 node_modules/mongoose/node_modules/hooks/package.json delete mode 100644 
node_modules/mongoose/node_modules/mpath/package.json delete mode 100644 node_modules/mongoose/node_modules/mpromise/package.json delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/debug/package.json delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/bson.node delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/linker.lock delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson.node delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson/ext/bson.o delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/bson.target.mk delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/config.gypi delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/package.json delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/LICENSE delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/README.md delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/binding.gyp delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/kerberos.node delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/linker.lock delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/obj.target/kerberos.node delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/config.gypi delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/index.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/auth_processes/mongodb.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.c delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.cc delete mode 100644 
node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.c delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/sspi.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.c delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.c delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.h delete mode 100644 
node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.cc delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.h delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/package.json delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_tests.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_win32_test.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_tests.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_credentials_tests.js delete mode 100644 node_modules/mongoose/node_modules/mquery/node_modules/mongodb/package.json delete mode 100644 node_modules/mongoose/node_modules/mquery/package.json delete mode 100644 node_modules/mongoose/node_modules/ms/.npmignore delete mode 100644 node_modules/mongoose/node_modules/ms/package.json delete mode 100644 node_modules/mongoose/node_modules/muri/package.json delete mode 100644 node_modules/mongoose/node_modules/regexp-clone/package.json delete mode 100644 node_modules/mongoose/node_modules/sliced/package.json rename node_modules/{mongoose/node_modules => }/mpath/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/mpath/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/mpath/History.md (100%) rename node_modules/{mongoose/node_modules => }/mpath/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/mpath/Makefile (100%) rename node_modules/{mongoose/node_modules => }/mpath/README.md (100%) rename node_modules/{mongoose/node_modules => }/mpath/index.js (100%) rename node_modules/{mongoose/node_modules => }/mpath/lib/index.js (100%) create mode 100644 node_modules/mpath/package.json rename node_modules/{mongoose/node_modules => }/mpath/test/index.js (100%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/.name (100%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/codeStyleSettings.xml (97%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/encodings.xml (97%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/inspectionProfiles/Project_Default.xml (98%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/inspectionProfiles/profiles_settings.xml (97%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/jsLibraryMappings.xml (97%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml (96%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/misc.xml (96%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/modules.xml (96%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/mpromise.iml (97%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/other.xml (95%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/scopes/scope_settings.xml (97%) rename node_modules/{mongoose/node_modules => 
}/mpromise/.idea/vcs.xml (96%) rename node_modules/{mongoose/node_modules => }/mpromise/.idea/workspace.xml (98%) rename node_modules/{mongoose/node_modules => }/mpromise/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/mpromise/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/mpromise/History.md (100%) rename node_modules/{mongoose/node_modules => }/mpromise/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/mpromise/README.md (100%) rename node_modules/{mongoose/node_modules => }/mpromise/index.js (100%) rename node_modules/{mongoose/node_modules => }/mpromise/lib/promise.js (100%) create mode 100644 node_modules/mpromise/package.json rename node_modules/{mongoose/node_modules => }/mpromise/test/promise.domain.test.js (100%) rename node_modules/{mongoose/node_modules => }/mpromise/test/promise.test.js (100%) rename node_modules/{mongoose/node_modules => }/mpromise/test/promises.Aplus.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/mquery/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/mquery/History.md (100%) rename node_modules/{mongoose/node_modules => }/mquery/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/mquery/Makefile (100%) rename node_modules/{mongoose/node_modules => }/mquery/README.md (100%) rename node_modules/{mongoose/node_modules => }/mquery/index.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/collection/collection.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/collection/index.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/collection/node.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/env.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/mquery.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/permissions.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/lib/utils.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/.travis.yml (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/Makefile (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/README.md (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/binding.gyp (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/browser_build/bson.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/browser_build/package.json (100%) rename node_modules/{mongodb => mquery}/node_modules/bson/build/Makefile (75%) create mode 100644 node_modules/mquery/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/build/binding.Makefile (69%) create mode 100644 node_modules/mquery/node_modules/bson/build/bson.target.mk create mode 100644 node_modules/mquery/node_modules/bson/build/config.gypi create mode 100755 node_modules/mquery/node_modules/bson/build/gyp-mac-tool rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/build_browser.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/Makefile 
(100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/bson.cc (97%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/bson.h (97%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/index.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/win32/ia32/bson.node (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/win32/x64/bson.node (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/ext/wscript (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/binary.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/binary_parser.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/bson.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/code.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/db_ref.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/double.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/float_parser.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/index.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/long.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/max_key.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/min_key.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/objectid.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/symbol.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/lib/bson/timestamp.js (100%) create mode 100644 node_modules/mquery/node_modules/bson/package.json rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/tools/gleak.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/tools/jasmine-1.1.0/jasmine.css (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/tools/jasmine-1.1.0/jasmine.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/mongodb => mquery}/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png (100%) rename node_modules/{express/node_modules/cookie-signature => mquery/node_modules/debug}/.npmignore (100%) rename 
node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/History.md (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/Makefile (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/Readme.md (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/debug.component.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/debug.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/example/app.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/example/browser.html (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/example/wildcards.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/example/worker.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/head.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/index.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/lib/debug.js (100%) create mode 100644 node_modules/mquery/node_modules/debug/package.json rename node_modules/{mongoose/node_modules => }/mquery/node_modules/debug/tail.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/CONTRIBUTING.md (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/Makefile (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/Readme.md (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/index.js (100%) mode change 100644 => 100755 rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/admin.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_cr.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_gssapi.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_plain.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_sspi.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/collection.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/base_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/db_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/delete_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/get_more_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/insert_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/kill_cursor_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/query_command.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/commands/update_command.js (100%) rename 
node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/base.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/connection.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/connection_pool.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/connection_utils.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/mongos.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/read_preference.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/ha.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/options.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set_state.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/ping_strategy.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/statistics_strategy.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/server.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/connection/url_parser.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/cursor.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/cursorstream.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/db.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/gridfs/chunk.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/gridfs/grid.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/gridfs/gridstore.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/gridfs/readstream.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/index.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/mongo_client.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/responses/mongo_reply.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/lib/mongodb/utils.js (100%) create mode 100755 node_modules/mquery/node_modules/mongodb/package.json rename node_modules/{mongoose/node_modules => }/mquery/node_modules/mongodb/t.js (100%) create mode 100644 node_modules/mquery/package.json rename node_modules/{mongoose/node_modules => }/mquery/test/collection/browser.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/test/collection/mongo.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/test/collection/node.js (100%) rename node_modules/{mongoose/node_modules => }/mquery/test/env.js (100%) rename node_modules/{mongoose/node_modules 
=> }/mquery/test/index.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap => ms}/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/ms/Makefile (100%) rename node_modules/{mongoose/node_modules => }/ms/README.md (100%) rename node_modules/{mongoose/node_modules => }/ms/ms.js (100%) create mode 100644 node_modules/ms/package.json rename node_modules/{mongoose/node_modules => }/ms/test/index.html (100%) rename node_modules/{mongoose/node_modules => }/ms/test/support/jquery.js (100%) rename node_modules/{mongoose/node_modules => }/ms/test/test.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/.jshintrc (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter => multiparty}/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/.travis.yml (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/CHANGELOG.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/LICENSE (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/README.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/examples/azureblobstorage.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/examples/s3.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/examples/upload.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/index.js (100%) mode change 100644 => 100755 create mode 100644 node_modules/multiparty/package.json rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/bench-multipart-parser.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/file/beta-sticker-1.png (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/file/blank.gif (100%) mode change 100644 => 100755 rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/file/funkyfilename.txt (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/file/menu_separator.png (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/file/pf1y5.png (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/file/plain.txt (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http (99%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http (98%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/encoding/blank.gif.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/encoding/menu_seperator.png.http (99%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/encoding/pf1y5.png.http (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/encoding/plain.txt.http (96%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/no-filename/filename-name.http (96%) rename 
node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/no-filename/generic.http (96%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/preamble/crlf.http (96%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/preamble/preamble.http (96%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/info.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http (97%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/workarounds/missing-hyphens1.http (96%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/http/workarounds/missing-hyphens2.http (96%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/js/encoding.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/js/no-filename.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/js/preamble.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/js/special-chars-in-filename.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/js/workarounds.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/multi_video.upload (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/fixture/multipart.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/record.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-connection-aborted.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-content-transfer-encoding.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-invalid.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-issue-15.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-issue-19.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-issue-21.js (100%) rename node_modules/{express/node_modules/connect/node_modules => 
}/multiparty/test/standalone/test-issue-4.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-issue-46.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/standalone/test-issue-5.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/multiparty/test/test.js (100%) rename node_modules/{mongoose/node_modules => }/muri/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/muri/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/muri/History.md (100%) rename node_modules/{mongoose/node_modules => }/muri/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/muri/Makefile (100%) rename node_modules/{mongoose/node_modules => }/muri/README.md (100%) rename node_modules/{mongoose/node_modules => }/muri/index.js (100%) rename node_modules/{mongoose/node_modules => }/muri/lib/index.js (100%) create mode 100644 node_modules/muri/package.json rename node_modules/{mongoose/node_modules => }/muri/test/index.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/LICENSE (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/examples/accept.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/examples/charset.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/examples/encoding.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/examples/language.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/lib/charset.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/lib/encoding.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/lib/language.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/lib/mediaType.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/lib/negotiator.js (100%) create mode 100644 node_modules/negotiator/package.json rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/readme.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/test/charset.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/test/encoding.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/test/language.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/negotiator/test/mediaType.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/.travis.yml (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/bool.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/boolean_double.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/boolean_single.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/default_hash.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/default_singles.js (100%) rename 
node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/divide.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/line_count.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/line_count_options.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/line_count_wrap.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/nonopt.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/reflect.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/short.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/string.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/usage-options.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/example/xup.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/index.js (100%) create mode 100644 node_modules/optimist/package.json rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/readme.markdown (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/test/_.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/test/_/argv.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/test/_/bin.js (100%) mode change 100644 => 100755 rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/test/parse.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/optimist/test/usage.js (100%) rename node_modules/{express/node_modules/send => pause}/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules => }/pause/History.md (100%) rename node_modules/{express/node_modules/cookie-signature => pause}/Makefile (100%) rename node_modules/{express/node_modules/connect/node_modules => }/pause/Readme.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/pause/index.js (100%) create mode 100644 node_modules/pause/package.json rename node_modules/{express/node_modules/connect/node_modules => }/qs/.gitmodules (100%) rename node_modules/{express/node_modules/connect/node_modules => }/qs/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules => }/qs/Readme.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/qs/index.js (100%) create mode 100644 node_modules/qs/package.json rename node_modules/{express/node_modules => }/range-parser/.npmignore (100%) rename node_modules/{express/node_modules => }/range-parser/History.md (100%) rename node_modules/{express/node_modules => }/range-parser/Makefile (100%) rename node_modules/{express/node_modules => }/range-parser/Readme.md (100%) rename node_modules/{express/node_modules => }/range-parser/index.js (100%) create mode 100644 node_modules/range-parser/package.json rename node_modules/{express/node_modules/connect/node_modules => }/raw-body/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules => 
}/raw-body/.travis.yml (100%) rename node_modules/{express/node_modules/connect/node_modules => }/raw-body/Makefile (100%) rename node_modules/{express/node_modules/connect/node_modules => }/raw-body/README.md (100%) rename node_modules/{express/node_modules/connect/node_modules => }/raw-body/index.js (100%) create mode 100644 node_modules/raw-body/package.json rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/LICENSE (100%) create mode 100644 node_modules/readable-stream/README.md rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/duplex.js (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/float.patch (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/lib/_stream_duplex.js (84%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/lib/_stream_passthrough.js (93%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/lib/_stream_readable.js (95%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/lib/_stream_transform.js (97%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/lib/_stream_writable.js (94%) create mode 100644 node_modules/readable-stream/package.json rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/passthrough.js (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/readable.js (76%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/transform.js (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/readable-stream/writable.js (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/History.md (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/Makefile (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/README.md (100%) rename node_modules/{mongoose/node_modules => }/regexp-clone/index.js (100%) create mode 100644 node_modules/regexp-clone/package.json rename node_modules/{mongoose/node_modules => }/regexp-clone/test/index.js (100%) rename node_modules/{express3-handlebars/node_modules => }/semver/.npmignore (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund => semver}/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules => }/semver/Makefile (100%) rename node_modules/{express3-handlebars/node_modules => }/semver/README.md (55%) rename node_modules/{express3-handlebars/node_modules => }/semver/bin/semver (94%) mode change 100644 => 100755 rename node_modules/{express3-handlebars/node_modules => }/semver/foot.js (100%) rename node_modules/{express3-handlebars/node_modules => }/semver/head.js (100%) create mode 100644 
node_modules/semver/package.json rename node_modules/{express3-handlebars/node_modules => }/semver/semver.browser.js (94%) rename node_modules/{express3-handlebars/node_modules => }/semver/semver.js (94%) create mode 100644 node_modules/semver/semver.min.js rename node_modules/{express3-handlebars/node_modules => }/semver/test/amd.js (100%) rename node_modules/{express3-handlebars/node_modules => }/semver/test/gtr.js (100%) rename node_modules/{express3-handlebars/node_modules => }/semver/test/index.js (95%) rename node_modules/{express3-handlebars/node_modules => }/semver/test/ltr.js (98%) rename node_modules/{express3-handlebars/node_modules => }/semver/test/no-module.js (100%) rename node_modules/{mongoose/node_modules/mquery/node_modules/debug => send}/.npmignore (100%) rename node_modules/{express/node_modules => }/send/History.md (100%) rename node_modules/{express/node_modules => }/send/Makefile (100%) rename node_modules/{express/node_modules => }/send/Readme.md (100%) rename node_modules/{express/node_modules => }/send/index.js (100%) rename node_modules/{express/node_modules => }/send/lib/send.js (100%) rename node_modules/{express/node_modules => }/send/lib/utils.js (100%) create mode 100644 node_modules/send/package.json create mode 100644 node_modules/sigmund/LICENSE rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/sigmund/README.md (91%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/sigmund/bench.js (100%) create mode 100644 node_modules/sigmund/package.json rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/sigmund/sigmund.js (100%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules => }/sigmund/test/basic.js (100%) rename node_modules/{mongoose/node_modules => }/sliced/.npmignore (100%) rename node_modules/{mongoose/node_modules => }/sliced/.travis.yml (100%) rename node_modules/{mongoose/node_modules => }/sliced/History.md (100%) rename node_modules/{mongoose/node_modules => }/sliced/LICENSE (100%) rename node_modules/{mongoose/node_modules => }/sliced/Makefile (100%) rename node_modules/{mongoose/node_modules => }/sliced/README.md (100%) rename node_modules/{mongoose/node_modules => }/sliced/bench.js (100%) rename node_modules/{mongoose/node_modules => }/sliced/component.json (100%) rename node_modules/{mongoose/node_modules => }/sliced/index.js (100%) rename node_modules/{mongoose/node_modules => }/sliced/lib/sliced.js (100%) create mode 100644 node_modules/sliced/package.json rename node_modules/{mongoose/node_modules => }/sliced/test/index.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/.npmignore (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/.travis.yml (100%) create mode 100644 node_modules/source-map/CHANGELOG.md rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/Makefile.dryice.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/README.md (85%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => 
}/source-map/build/assert-shim.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/mini-require.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/prefix-source-map.jsm (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/prefix-utils.jsm (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/suffix-browser.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/suffix-source-map.jsm (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/suffix-utils.jsm (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/test-prefix.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/build/test-suffix.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/array-set.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/base64-vlq.js (93%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/base64.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/binary-search.js (80%) create mode 100644 node_modules/source-map/lib/source-map/mapping-list.js rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/source-map-consumer.js (72%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/source-map-generator.js (76%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/source-node.js (72%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/lib/source-map/util.js (57%) create mode 100644 node_modules/source-map/package.json rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/run-tests.js (82%) mode change 100644 => 100755 rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-api.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-array-set.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-base64-vlq.js (86%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-base64.js (100%) rename 
node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-binary-search.js (82%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-dog-fooding.js (74%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-source-map-consumer.js (65%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-source-map-generator.js (58%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/test-source-node.js (58%) create mode 100644 node_modules/source-map/test/source-map/test-util.js rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules => }/source-map/test/source-map/util.js (86%) rename node_modules/{express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache => stream-counter}/.npmignore (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/stream-counter/README.md (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/stream-counter/index.js (100%) create mode 100644 node_modules/stream-counter/package.json rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/stream-counter/test/test.js (100%) rename node_modules/{express/node_modules/connect/node_modules/multiparty/node_modules => }/stream-counter/test/test.txt (100%) create mode 100644 node_modules/string_decoder/.npmignore create mode 100644 node_modules/string_decoder/LICENSE create mode 100644 node_modules/string_decoder/README.md create mode 100644 node_modules/string_decoder/index.js create mode 100644 node_modules/string_decoder/package.json rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/.npmignore (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/.travis.yml (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/LICENSE (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/README.md (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/bin/uglifyjs (100%) mode change 100644 => 100755 rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/ast.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/compress.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/mozilla-ast.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/output.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/parse.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/scope.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/sourcemap.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/transform.js (100%) rename 
node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/lib/utils.js (100%) create mode 100644 node_modules/uglify-js/package.json rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/arrays.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/blocks.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/conditionals.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/dead-code.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/debugger.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/drop-unused.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/issue-105.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/issue-12.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/issue-143.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/issue-22.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/issue-44.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/issue-59.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/labels.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/loops.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/properties.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/sequences.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/switch.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/compress/typeof.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/test/run-tests.js (100%) mode change 100644 => 100755 rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules => }/uglify-js/tools/node.js (100%) rename node_modules/{express/node_modules/connect/node_modules => }/uid2/LICENSE (100%) rename node_modules/{express/node_modules/connect/node_modules => }/uid2/index.js (100%) create mode 100644 node_modules/uid2/package.json create mode 100644 node_modules/wordwrap/LICENSE rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/README.markdown (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/example/center.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/example/meat.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/index.js (100%) create mode 100644 
node_modules/wordwrap/package.json rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/test/break.js (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/test/idleness.txt (100%) rename node_modules/{express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules => }/wordwrap/test/wrap.js (100%) create mode 100644 package-lock.json diff --git a/node_modules/express3-handlebars/node_modules/.bin/handlebars b/node_modules/.bin/handlebars similarity index 100% rename from node_modules/express3-handlebars/node_modules/.bin/handlebars rename to node_modules/.bin/handlebars diff --git a/node_modules/express3-handlebars/node_modules/.bin/semver b/node_modules/.bin/semver similarity index 100% rename from node_modules/express3-handlebars/node_modules/.bin/semver rename to node_modules/.bin/semver diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/.bin/uglifyjs b/node_modules/.bin/uglifyjs similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/.bin/uglifyjs rename to node_modules/.bin/uglifyjs diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/LICENSE b/node_modules/amdefine/LICENSE similarity index 96% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/LICENSE rename to node_modules/amdefine/LICENSE index f33d665de..9b25ee006 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/LICENSE +++ b/node_modules/amdefine/LICENSE @@ -6,7 +6,7 @@ provided below. The "New" BSD License: ---------------------- -Copyright (c) 2011, The Dojo Foundation +Copyright (c) 2011-2016, The Dojo Foundation All rights reserved. Redistribution and use in source and binary forms, with or without @@ -37,7 +37,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. MIT License ----------- -Copyright (c) 2011, The Dojo Foundation +Copyright (c) 2011-2016, The Dojo Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/README.md b/node_modules/amdefine/README.md similarity index 99% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/README.md rename to node_modules/amdefine/README.md index c6995c072..037a6e817 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/README.md +++ b/node_modules/amdefine/README.md @@ -59,7 +59,7 @@ intercept module affects all .js files loaded in the Node app, and it is inconsiderate to modify global state like that unless you are also controlling the top level app. -#### Why distribute AMD-style nodes via npm? +#### Why distribute AMD-style modules via npm? 
npm has a lot of weaknesses for front-end use (installed layout is not great, should have better support for the `baseUrl + moduleID + '.js' style of loading, diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/amdefine.js b/node_modules/amdefine/amdefine.js similarity index 97% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/amdefine.js rename to node_modules/amdefine/amdefine.js index 53bf5a686..ca830ba4f 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/amdefine.js +++ b/node_modules/amdefine/amdefine.js @@ -1,5 +1,5 @@ /** vim: et:ts=4:sw=4:sts=4 - * @license amdefine 0.1.0 Copyright (c) 2011, The Dojo Foundation All Rights Reserved. + * @license amdefine 1.0.1 Copyright (c) 2011-2016, The Dojo Foundation All Rights Reserved. * Available via the MIT or new BSD license. * see: http://github.com/jrburke/amdefine for details */ @@ -120,9 +120,11 @@ function amdefine(module, requireFn) { }); //Wait for next tick to call back the require call. - process.nextTick(function () { - callback.apply(null, deps); - }); + if (callback) { + process.nextTick(function () { + callback.apply(null, deps); + }); + } } } diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/intercept.js b/node_modules/amdefine/intercept.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/intercept.js rename to node_modules/amdefine/intercept.js diff --git a/node_modules/amdefine/package.json b/node_modules/amdefine/package.json new file mode 100644 index 000000000..0d144704a --- /dev/null +++ b/node_modules/amdefine/package.json @@ -0,0 +1,48 @@ +{ + "_from": "amdefine@>=0.0.4", + "_id": "amdefine@1.0.1", + "_inBundle": false, + "_integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", + "_location": "/amdefine", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "amdefine@>=0.0.4", + "name": "amdefine", + "escapedName": "amdefine", + "rawSpec": ">=0.0.4", + "saveSpec": null, + "fetchSpec": ">=0.0.4" + }, + "_requiredBy": [ + "/source-map" + ], + "_resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", + "_shasum": "4a5282ac164729e93619bcfd3ad151f817ce91f5", + "_spec": "amdefine@>=0.0.4", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/source-map", + "author": { + "name": "James Burke", + "email": "jrburke@gmail.com", + "url": "http://github.com/jrburke" + }, + "bugs": { + "url": "https://github.com/jrburke/amdefine/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Provide AMD's define() API for declaring modules in the AMD format", + "engines": { + "node": ">=0.4.2" + }, + "homepage": "http://github.com/jrburke/amdefine", + "license": "BSD-3-Clause OR MIT", + "main": "./amdefine.js", + "name": "amdefine", + "repository": { + "type": "git", + "url": "git+https://github.com/jrburke/amdefine.git" + }, + "version": "1.0.1" +} diff --git a/node_modules/express3-handlebars/node_modules/async/LICENSE b/node_modules/async/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/async/LICENSE rename to node_modules/async/LICENSE diff 
--git a/node_modules/express3-handlebars/node_modules/async/README.md b/node_modules/async/README.md similarity index 100% rename from node_modules/express3-handlebars/node_modules/async/README.md rename to node_modules/async/README.md diff --git a/node_modules/express3-handlebars/node_modules/async/component.json b/node_modules/async/component.json similarity index 100% rename from node_modules/express3-handlebars/node_modules/async/component.json rename to node_modules/async/component.json diff --git a/node_modules/express3-handlebars/node_modules/async/lib/async.js b/node_modules/async/lib/async.js old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express3-handlebars/node_modules/async/lib/async.js rename to node_modules/async/lib/async.js diff --git a/node_modules/async/package.json b/node_modules/async/package.json new file mode 100644 index 000000000..106fba2b2 --- /dev/null +++ b/node_modules/async/package.json @@ -0,0 +1,65 @@ +{ + "_from": "async@~0.2", + "_id": "async@0.2.10", + "_inBundle": false, + "_integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=", + "_location": "/async", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "async@~0.2", + "name": "async", + "escapedName": "async", + "rawSpec": "~0.2", + "saveSpec": null, + "fetchSpec": "~0.2" + }, + "_requiredBy": [ + "/express3-handlebars", + "/uglify-js" + ], + "_resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "_shasum": "b6bbe0b0674b9d719708ca38de8c237cb526c3d1", + "_spec": "async@~0.2", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express3-handlebars", + "author": { + "name": "Caolan McMahon" + }, + "bugs": { + "url": "https://github.com/caolan/async/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Higher-order functions and common patterns for asynchronous code", + "devDependencies": { + "nodelint": ">0.0.0", + "nodeunit": ">0.0.0", + "uglify-js": "1.2.x" + }, + "homepage": "https://github.com/caolan/async#readme", + "jam": { + "main": "lib/async.js", + "include": [ + "lib/async.js", + "README.md", + "LICENSE" + ] + }, + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/caolan/async/raw/master/LICENSE" + } + ], + "main": "./lib/async", + "name": "async", + "repository": { + "type": "git", + "url": "git+https://github.com/caolan/async.git" + }, + "scripts": { + "test": "nodeunit test/test-async.js" + }, + "version": "0.2.10" +} diff --git a/node_modules/express/node_modules/connect/node_modules/batch/.npmignore b/node_modules/batch/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/batch/.npmignore rename to node_modules/batch/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/batch/History.md b/node_modules/batch/History.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/batch/History.md rename to node_modules/batch/History.md diff --git a/node_modules/express/node_modules/connect/node_modules/batch/Makefile b/node_modules/batch/Makefile similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/batch/Makefile rename to node_modules/batch/Makefile diff --git a/node_modules/express/node_modules/connect/node_modules/batch/Readme.md b/node_modules/batch/Readme.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/batch/Readme.md rename to node_modules/batch/Readme.md diff 
--git a/node_modules/express/node_modules/connect/node_modules/batch/component.json b/node_modules/batch/component.json similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/batch/component.json rename to node_modules/batch/component.json diff --git a/node_modules/express/node_modules/connect/node_modules/batch/index.js b/node_modules/batch/index.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/batch/index.js rename to node_modules/batch/index.js diff --git a/node_modules/batch/package.json b/node_modules/batch/package.json new file mode 100644 index 000000000..8bab19013 --- /dev/null +++ b/node_modules/batch/package.json @@ -0,0 +1,39 @@ +{ + "_from": "batch@0.5.0", + "_id": "batch@0.5.0", + "_inBundle": false, + "_integrity": "sha1-/S4Fp6XWlrTbkxQBPihdj/NVfsM=", + "_location": "/batch", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "batch@0.5.0", + "name": "batch", + "escapedName": "batch", + "rawSpec": "0.5.0", + "saveSpec": null, + "fetchSpec": "0.5.0" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz", + "_shasum": "fd2e05a7a5d696b4db9314013e285d8ff3557ec3", + "_spec": "batch@0.5.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Simple async batch", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "main": "index", + "name": "batch", + "version": "0.5.0" +} diff --git a/node_modules/mongodb/node_modules/bson/.travis.yml b/node_modules/bson/.travis.yml similarity index 100% rename from node_modules/mongodb/node_modules/bson/.travis.yml rename to node_modules/bson/.travis.yml diff --git a/node_modules/mongodb/node_modules/bson/Makefile b/node_modules/bson/Makefile similarity index 100% rename from node_modules/mongodb/node_modules/bson/Makefile rename to node_modules/bson/Makefile diff --git a/node_modules/mongodb/node_modules/bson/README.md b/node_modules/bson/README.md similarity index 100% rename from node_modules/mongodb/node_modules/bson/README.md rename to node_modules/bson/README.md diff --git a/node_modules/mongodb/node_modules/bson/binding.gyp b/node_modules/bson/binding.gyp similarity index 100% rename from node_modules/mongodb/node_modules/bson/binding.gyp rename to node_modules/bson/binding.gyp diff --git a/node_modules/mongodb/node_modules/bson/browser_build/bson.js b/node_modules/bson/browser_build/bson.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/browser_build/bson.js rename to node_modules/bson/browser_build/bson.js diff --git a/node_modules/mongodb/node_modules/bson/browser_build/package.json b/node_modules/bson/browser_build/package.json similarity index 100% rename from node_modules/mongodb/node_modules/bson/browser_build/package.json rename to node_modules/bson/browser_build/package.json diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Makefile b/node_modules/bson/build/Makefile similarity index 76% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Makefile rename to node_modules/bson/build/Makefile index 647bff8f0..34839705a 100644 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Makefile +++ 
b/node_modules/bson/build/Makefile @@ -41,30 +41,22 @@ all_deps := CC.target ?= $(CC) -CFLAGS.target ?= $(CFLAGS) +CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CXXFLAGS) +CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) LINK.target ?= $(LINK) LDFLAGS.target ?= $(LDFLAGS) AR.target ?= $(AR) # C++ apps need to be linked with g++. -# -# Note: flock is used to seralize linking. Linking is a memory-intensive -# process so running parallel links can often lead to thrashing. To disable -# the serialization, override LINK via an envrionment variable as follows: -# -# export LINK=g++ -# -# This will allow make to invoke N linker processes as specified in -jN. -LINK ?= flock $(builddir)/linker.lock $(CXX.target) +LINK ?= $(CXX.target) # TODO(evan): move all cross-compilation logic to gyp-time so we don't need # to replicate this environment fallback in make as well. CC.host ?= gcc -CFLAGS.host ?= +CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) CXX.host ?= g++ -CXXFLAGS.host ?= +CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) LINK.host ?= $(CXX.host) LDFLAGS.host ?= AR.host ?= ar @@ -134,6 +126,34 @@ cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $ quiet_cmd_cxx = CXX($(TOOLSET)) $@ cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_objc = CXX($(TOOLSET)) $@ +cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< + +quiet_cmd_objcxx = CXX($(TOOLSET)) $@ +cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# Commands for precompiled header files. +quiet_cmd_pch_c = CXX($(TOOLSET)) $@ +cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ +cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_m = CXX($(TOOLSET)) $@ +cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< +quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ +cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# gyp-mac-tool is written next to the root Makefile by gyp. +# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd +# already. +quiet_cmd_mac_tool = MACTOOL $(4) $< +cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" + +quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ +cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) + +quiet_cmd_infoplist = INFOPLIST $@ +cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" + quiet_cmd_touch = TOUCH $@ cmd_touch = touch $@ @@ -141,39 +161,17 @@ quiet_cmd_copy = COPY $@ # send stderr to /dev/null to ignore messages when linking directories. cmd_copy = rm -rf "$@" && cp -af "$<" "$@" -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) +quiet_cmd_alink = LIBTOOL-STATIC $@ +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. 
quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) # Define an escape_quotes function to escape single quotes. 
@@ -238,7 +236,7 @@ define do_cmd $(if $(or $(command_changed),$(prereq_changed)), @$(call exact_echo, $($(quiet)cmd_$(1))) @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 1,$(cmd_$1))), + $(if $(findstring flock,$(word 2,$(cmd_$1))), @$(cmd_$(1)) @echo " $(quiet_cmd_$(1)): Finished", @$(cmd_$(1)) @@ -276,6 +274,10 @@ $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD @@ -290,6 +292,10 @@ $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD @@ -303,6 +309,10 @@ $(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD @@ -316,8 +326,8 @@ ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ endif quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/home/vagrant/lab4/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/home/vagrant/.node-gyp/0.10.24/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/home/vagrant/.node-gyp/0.10.24" "-Dmodule_root_dir=/home/vagrant/lab4/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson" binding.gyp -Makefile: $(srcdir)/../../../../../../../../../.node-gyp/0.10.24/common.gypi $(srcdir)/../../../../../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp +cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/bson/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/common.gypi "--depth=." "-Goutput_dir=." 
"--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/tamtranht02/Library/Caches/node-gyp/14.15.4" "-Dnode_gyp_dir=/usr/local/lib/node_modules/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/<(target_arch)/node.lib" "-Dmodule_root_dir=/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/bson" "-Dnode_engine=v8" binding.gyp +Makefile: $(srcdir)/../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/../../../../../Library/Caches/node-gyp/14.15.4/include/node/common.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(call do_cmd,regen_makefile) # "all" is a concatenation of the "all" targets from all the included diff --git a/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw b/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw new file mode 100644 index 000000000..9dea225ab --- /dev/null +++ b/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw @@ -0,0 +1,19 @@ +Release/obj.target/bson/ext/bson.o: ../ext/bson.cc \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/cppgc/common.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8config.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-internal.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-platform.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_buffer.h \ + ../ext/bson.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_object_wrap.h \ + ../ext/nan.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/uv.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/uv/errno.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/uv/version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/uv/unix.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/uv/threadpool.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/uv/darwin.h diff --git a/node_modules/mongodb/node_modules/bson/build/binding.Makefile b/node_modules/bson/build/binding.Makefile similarity index 69% rename from node_modules/mongodb/node_modules/bson/build/binding.Makefile rename to node_modules/bson/build/binding.Makefile index 90bf8247b..d7430e6d7 100644 --- a/node_modules/mongodb/node_modules/bson/build/binding.Makefile +++ b/node_modules/bson/build/binding.Makefile @@ -1,6 +1,6 @@ # This file is generated by gyp; do not edit. -export builddir_name ?= build/./. +export builddir_name ?= ./build/. .PHONY: all all: $(MAKE) bson diff --git a/node_modules/bson/build/bson.target.mk b/node_modules/bson/build/bson.target.mk new file mode 100644 index 000000000..b58e04ad3 --- /dev/null +++ b/node_modules/bson/build/bson.target.mk @@ -0,0 +1,188 @@ +# This file is generated by gyp; do not edit. 
+ +TOOLSET := target +TARGET := bson +DEFS_Debug := \ + '-DNODE_GYP_MODULE_NAME=bson' \ + '-DUSING_UV_SHARED=1' \ + '-DUSING_V8_SHARED=1' \ + '-DV8_DEPRECATION_WARNINGS=1' \ + '-DV8_DEPRECATION_WARNINGS' \ + '-DV8_IMMINENT_DEPRECATION_WARNINGS' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DOPENSSL_NO_PINSHARED' \ + '-DOPENSSL_THREADS' \ + '-DBUILDING_NODE_EXTENSION' \ + '-DDEBUG' \ + '-D_DEBUG' \ + '-DV8_ENABLE_CHECKS' + +# Flags passed to all source files. +CFLAGS_Debug := \ + -O0 \ + -gdwarf-2 \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Debug := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Debug := \ + -std=gnu++1y \ + -stdlib=libc++ \ + -fno-rtti \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Debug := + +# Flags passed to only ObjC++ files. +CFLAGS_OBJCC_Debug := + +INCS_Debug := \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/src \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/config \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/openssl/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/uv/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/zlib \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/v8/include + +DEFS_Release := \ + '-DNODE_GYP_MODULE_NAME=bson' \ + '-DUSING_UV_SHARED=1' \ + '-DUSING_V8_SHARED=1' \ + '-DV8_DEPRECATION_WARNINGS=1' \ + '-DV8_DEPRECATION_WARNINGS' \ + '-DV8_IMMINENT_DEPRECATION_WARNINGS' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DOPENSSL_NO_PINSHARED' \ + '-DOPENSSL_THREADS' \ + '-DBUILDING_NODE_EXTENSION' + +# Flags passed to all source files. +CFLAGS_Release := \ + -O3 \ + -gdwarf-2 \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Release := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Release := \ + -std=gnu++1y \ + -stdlib=libc++ \ + -fno-rtti \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Release := + +# Flags passed to only ObjC++ files. +CFLAGS_OBJCC_Release := + +INCS_Release := \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/src \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/config \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/openssl/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/uv/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/zlib \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/v8/include + +OBJS := \ + $(obj).target/$(TARGET)/ext/bson.o + +# Add to the list of files we specially track dependencies for. +all_deps += $(OBJS) + +# CFLAGS et al overrides must be target-local. +# See "Target-specific Variable Values" in the GNU Make manual. 
+$(OBJS): TOOLSET := $(TOOLSET) +$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) +$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) + +# Suffix rules, putting all outputs into $(obj). + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +# Try building from generated source, too. + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +# End of this set of suffix rules +### Rules for final target. +LDFLAGS_Debug := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -L$(builddir) \ + -stdlib=libc++ + +LIBTOOLFLAGS_Debug := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first + +LDFLAGS_Release := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -L$(builddir) \ + -stdlib=libc++ + +LIBTOOLFLAGS_Release := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first + +LIBS := + +$(builddir)/bson.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) +$(builddir)/bson.node: LIBS := $(LIBS) +$(builddir)/bson.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) +$(builddir)/bson.node: TOOLSET := $(TOOLSET) +$(builddir)/bson.node: $(OBJS) FORCE_DO_CMD + $(call do_cmd,solink_module) + +all_deps += $(builddir)/bson.node +# Add target alias +.PHONY: bson +bson: $(builddir)/bson.node + +# Short alias for building this executable. +.PHONY: bson.node +bson.node: $(builddir)/bson.node + +# Add executable to "all" target. +.PHONY: all +all: $(builddir)/bson.node + diff --git a/node_modules/bson/build/config.gypi b/node_modules/bson/build/config.gypi new file mode 100644 index 000000000..6a09d0a25 --- /dev/null +++ b/node_modules/bson/build/config.gypi @@ -0,0 +1,203 @@ +# Do not edit. 
File was generated by node-gyp's "configure" step +{ + "target_defaults": { + "cflags": [], + "default_configuration": "Release", + "defines": [], + "include_dirs": [], + "libraries": [] + }, + "variables": { + "asan": 0, + "build_v8_with_gn": "false", + "coverage": "false", + "dcheck_always_on": 0, + "debug_nghttp2": "false", + "debug_node": "false", + "enable_lto": "false", + "enable_pgo_generate": "false", + "enable_pgo_use": "false", + "error_on_warn": "false", + "force_dynamic_crt": 0, + "host_arch": "x64", + "icu_data_in": "../../deps/icu-tmp/icudt67l.dat", + "icu_endianness": "l", + "icu_gyp_path": "tools/icu/icu-generic.gyp", + "icu_path": "deps/icu-small", + "icu_small": "false", + "icu_ver_major": "67", + "is_debug": 0, + "llvm_version": "11.0", + "napi_build_version": "7", + "node_byteorder": "little", + "node_debug_lib": "false", + "node_enable_d8": "false", + "node_install_npm": "true", + "node_module_version": 83, + "node_no_browser_globals": "false", + "node_prefix": "/usr/local", + "node_release_urlbase": "https://nodejs.org/download/release/", + "node_shared": "false", + "node_shared_brotli": "false", + "node_shared_cares": "false", + "node_shared_http_parser": "false", + "node_shared_libuv": "false", + "node_shared_nghttp2": "false", + "node_shared_openssl": "false", + "node_shared_zlib": "false", + "node_tag": "", + "node_target_type": "executable", + "node_use_bundled_v8": "true", + "node_use_dtrace": "true", + "node_use_etw": "false", + "node_use_node_code_cache": "true", + "node_use_node_snapshot": "true", + "node_use_openssl": "true", + "node_use_v8_platform": "true", + "node_with_ltcg": "false", + "node_without_node_options": "false", + "openssl_fips": "", + "openssl_is_fips": "false", + "ossfuzz": "false", + "shlib_suffix": "83.dylib", + "target_arch": "x64", + "v8_enable_31bit_smis_on_64bit_arch": 0, + "v8_enable_gdbjit": 0, + "v8_enable_i18n_support": 1, + "v8_enable_inspector": 1, + "v8_enable_lite_mode": 0, + "v8_enable_object_print": 1, + "v8_enable_pointer_compression": 0, + "v8_no_strict_aliasing": 1, + "v8_optimized_debug": 1, + "v8_promise_internal_field_count": 1, + "v8_random_seed": 0, + "v8_trace_maps": 0, + "v8_use_siphash": 1, + "want_separate_host_toolset": 0, + "xcode_version": "11.0", + "nodedir": "/Users/tamtranht02/Library/Caches/node-gyp/14.15.4", + "standalone_static_library": 1, + "dry_run": "", + "legacy_bundling": "", + "save_dev": "", + "browser": "", + "commit_hooks": "true", + "only": "", + "viewer": "man", + "also": "", + "rollback": "true", + "sign_git_commit": "", + "audit": "true", + "usage": "", + "globalignorefile": "/usr/local/etc/npmignore", + "init_author_url": "", + "maxsockets": "50", + "shell": "/bin/bash", + "metrics_registry": "https://registry.npmjs.org/", + "parseable": "", + "shrinkwrap": "true", + "init_license": "ISC", + "timing": "", + "if_present": "", + "cache_max": "Infinity", + "init_author_email": "", + "sign_git_tag": "", + "cert": "", + "git_tag_version": "true", + "local_address": "", + "long": "", + "preid": "", + "fetch_retries": "2", + "registry": "https://registry.npmjs.org/", + "key": "", + "message": "%s", + "versions": "", + "globalconfig": "/usr/local/etc/npmrc", + "always_auth": "", + "logs_max": "10", + "prefer_online": "", + "cache_lock_retries": "10", + "global_style": "", + "update_notifier": "true", + "audit_level": "low", + "heading": "npm", + "fetch_retry_mintimeout": "10000", + "offline": "", + "read_only": "", + "searchlimit": "20", + "access": "", + "json": "", + "allow_same_version": "", + 
"description": "true", + "engine_strict": "", + "https_proxy": "", + "init_module": "/Users/tamtranht02/.npm-init.js", + "userconfig": "/Users/tamtranht02/.npmrc", + "cidr": "", + "node_version": "14.15.4", + "user": "", + "auth_type": "legacy", + "editor": "vi", + "ignore_prepublish": "", + "save": "true", + "script_shell": "", + "tag": "latest", + "before": "", + "global": "", + "progress": "true", + "ham_it_up": "", + "optional": "true", + "searchstaleness": "900", + "bin_links": "true", + "force": "", + "save_prod": "", + "searchopts": "", + "depth": "Infinity", + "node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js", + "rebuild_bundle": "true", + "sso_poll_frequency": "500", + "unicode": "true", + "fetch_retry_maxtimeout": "60000", + "ca": "", + "save_prefix": "^", + "scripts_prepend_node_path": "warn-only", + "sso_type": "oauth", + "strict_ssl": "true", + "tag_version_prefix": "v", + "dev": "", + "fetch_retry_factor": "10", + "group": "20", + "save_exact": "", + "cache_lock_stale": "60000", + "prefer_offline": "", + "version": "", + "cache_min": "10", + "otp": "", + "cache": "/Users/tamtranht02/.npm", + "searchexclude": "", + "color": "true", + "package_lock": "true", + "fund": "true", + "package_lock_only": "", + "save_optional": "", + "user_agent": "npm/6.14.10 node/v14.15.4 darwin x64", + "ignore_scripts": "", + "cache_lock_wait": "10000", + "production": "", + "save_bundle": "", + "send_metrics": "", + "init_version": "1.0.0", + "node_options": "", + "umask": "0022", + "scope": "", + "git": "git", + "init_author_name": "", + "onload_script": "", + "tmp": "/var/folders/gz/y49xvbws5fl_4hbmp6h870vh0000gn/T", + "unsafe_perm": "true", + "format_package_lock": "true", + "link": "", + "prefix": "/usr/local" + } +} diff --git a/node_modules/bson/build/gyp-mac-tool b/node_modules/bson/build/gyp-mac-tool new file mode 100755 index 000000000..033b4e538 --- /dev/null +++ b/node_modules/bson/build/gyp-mac-tool @@ -0,0 +1,615 @@ +#!/usr/bin/env python +# Generated by gyp. Do not edit. +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions to perform Xcode-style build steps. + +These functions are executed via gyp-mac-tool when using the Makefile generator. +""" + +from __future__ import print_function + +import fcntl +import fnmatch +import glob +import json +import os +import plistlib +import re +import shutil +import string +import subprocess +import sys +import tempfile + +PY3 = bytes != str + + +def main(args): + executor = MacTool() + exit_code = executor.Dispatch(args) + if exit_code is not None: + sys.exit(exit_code) + + +class MacTool(object): + """This class performs all the Mac tooling steps. 
The methods can either be + executed directly, or dispatched from an argument list.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + return getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace('-', '') + + def ExecCopyBundleResource(self, source, dest, convert_to_binary): + """Copies a resource file to the bundle/Resources directory, performing any + necessary compilation on each resource.""" + extension = os.path.splitext(source)[1].lower() + if os.path.isdir(source): + # Copy tree. + # TODO(thakis): This copies file attributes like mtime, while the + # single-file branch below doesn't. This should probably be changed to + # be consistent with the single-file branch. + if os.path.exists(dest): + shutil.rmtree(dest) + shutil.copytree(source, dest) + elif extension == '.xib': + return self._CopyXIBFile(source, dest) + elif extension == '.storyboard': + return self._CopyXIBFile(source, dest) + elif extension == '.strings': + self._CopyStringsFile(source, dest, convert_to_binary) + else: + shutil.copy(source, dest) + + def _CopyXIBFile(self, source, dest): + """Compiles a XIB file with ibtool into a binary plist in the bundle.""" + + # ibtool sometimes crashes with relative paths. See crbug.com/314728. + base = os.path.dirname(os.path.realpath(__file__)) + if os.path.relpath(source): + source = os.path.join(base, source) + if os.path.relpath(dest): + dest = os.path.join(base, dest) + + args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', + '--output-format', 'human-readable-text', '--compile', dest, source] + ibtool_section_re = re.compile(r'/\*.*\*/') + ibtool_re = re.compile(r'.*note:.*is clipping its content') + ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) + current_section_header = None + for line in ibtoolout.stdout: + if ibtool_section_re.match(line): + current_section_header = line + elif not ibtool_re.match(line): + if current_section_header: + sys.stdout.write(current_section_header) + current_section_header = None + sys.stdout.write(line) + return ibtoolout.returncode + + def _ConvertToBinary(self, dest): + subprocess.check_call([ + 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) + + def _CopyStringsFile(self, source, dest, convert_to_binary): + """Copies a .strings file using iconv to reconvert the input into UTF-16.""" + input_code = self._DetectInputEncoding(source) or "UTF-8" + + # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call + # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints + # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing + # semicolon in dictionary. + # on invalid files. Do the same kind of validation. + import CoreFoundation + s = open(source, 'rb').read() + d = CoreFoundation.CFDataCreate(None, s, len(s)) + _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) + if error: + return + + fp = open(dest, 'wb') + fp.write(s.decode(input_code).encode('UTF-16')) + fp.close() + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _DetectInputEncoding(self, file_name): + """Reads the first few bytes from file_name and tries to guess the text + encoding. 
Returns None as a guess if it can't detect it.""" + fp = open(file_name, 'rb') + try: + header = fp.read(3) + except Exception: + fp.close() + return None + fp.close() + if header.startswith("\xFE\xFF"): + return "UTF-16" + elif header.startswith("\xFF\xFE"): + return "UTF-16" + elif header.startswith("\xEF\xBB\xBF"): + return "UTF-8" + else: + return None + + def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): + """Copies the |source| Info.plist to the destination directory |dest|.""" + # Read the source Info.plist into memory. + fd = open(source, 'r') + lines = fd.read() + fd.close() + + # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). + plist = plistlib.readPlistFromString(lines) + if keys: + plist = dict(plist.items() + json.loads(keys[0]).items()) + lines = plistlib.writePlistToString(plist) + + # Go through all the environment variables and replace them as variables in + # the file. + IDENT_RE = re.compile(r'[/\s]') + for key in os.environ: + if key.startswith('_'): + continue + evar = '${%s}' % key + evalue = os.environ[key] + lines = string.replace(lines, evar, evalue) + + # Xcode supports various suffices on environment variables, which are + # all undocumented. :rfc1034identifier is used in the standard project + # template these days, and :identifier was used earlier. They are used to + # convert non-url characters into things that look like valid urls -- + # except that the replacement character for :identifier, '_' isn't valid + # in a URL either -- oops, hence :rfc1034identifier was born. + evar = '${%s:identifier}' % key + evalue = IDENT_RE.sub('_', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + evar = '${%s:rfc1034identifier}' % key + evalue = IDENT_RE.sub('-', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + # Remove any keys with values that haven't been replaced. + lines = lines.split('\n') + for i in range(len(lines)): + if lines[i].strip().startswith("${"): + lines[i] = None + lines[i - 1] = None + lines = '\n'.join(filter(lambda x: x is not None, lines)) + + # Write out the file with variables replaced. + fd = open(dest, 'w') + fd.write(lines) + fd.close() + + # Now write out PkgInfo file now that the Info.plist file has been + # "compiled". + self._WritePkgInfo(dest) + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _WritePkgInfo(self, info_plist): + """This writes the PkgInfo file from the data stored in Info.plist.""" + plist = plistlib.readPlist(info_plist) + if not plist: + return + + # Only create PkgInfo for executable types. + package_type = plist['CFBundlePackageType'] + if package_type != 'APPL': + return + + # The format of PkgInfo is eight characters, representing the bundle type + # and bundle signature, each four characters. If that is missing, four + # '?' characters are used instead. + signature_code = plist.get('CFBundleSignature', '????') + if len(signature_code) != 4: # Wrong length resets everything, too. + signature_code = '?' * 4 + + dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') + fp = open(dest, 'w') + fp.write('%s%s' % (package_type, signature_code)) + fp.close() + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. 
+ fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) + fcntl.flock(fd, fcntl.LOCK_EX) + return subprocess.call(cmd_list) + + def ExecFilterLibtool(self, *cmd_list): + """Calls libtool and filters out '/path/to/libtool: file: foo.o has no + symbols'.""" + libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') + libtool_re5 = re.compile( + r'^.*libtool: warning for library: ' + + r'.* the table of contents is empty ' + + r'\(no object file members in the library define global symbols\)$') + env = os.environ.copy() + # Ref: + # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c + # The problem with this flag is that it resets the file mtime on the file to + # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. + env['ZERO_AR_DATE'] = '1' + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + _, err = libtoolout.communicate() + if PY3: + err = err.decode('utf-8') + for line in err.splitlines(): + if not libtool_re.match(line) and not libtool_re5.match(line): + print(line, file=sys.stderr) + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky. + if not libtoolout.returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): + os.utime(cmd_list[i+1], None) + break + return libtoolout.returncode + + def ExecPackageFramework(self, framework, version): + """Takes a path to Something.framework and the Current version of that and + sets up all the symlinks.""" + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + + CURRENT = 'Current' + RESOURCES = 'Resources' + VERSIONS = 'Versions' + + if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): + # Binary-less frameworks don't seem to contain symlinks (see e.g. + # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). + return + + # Move into the framework directory to set the symlinks correctly. + pwd = os.getcwd() + os.chdir(framework) + + # Set up the Current version. + self._Relink(version, os.path.join(VERSIONS, CURRENT)) + + # Set up the root symlinks. + self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) + self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) + + # Back to where we were before! + os.chdir(pwd) + + def _Relink(self, dest, link): + """Creates a symlink to |dest| named |link|. If |link| already exists, + it is overwritten.""" + if os.path.lexists(link): + os.remove(link) + os.symlink(dest, link) + + def ExecCompileXcassets(self, keys, *inputs): + """Compiles multiple .xcassets files into a single .car file. + + This invokes 'actool' to compile all the inputs .xcassets files. The + |keys| arguments is a json-encoded dictionary of extra arguments to + pass to 'actool' when the asset catalogs contains an application icon + or a launch image. + + Note that 'actool' does not create the Assets.car file if the asset + catalogs does not contains imageset. 
+ """ + command_line = [ + 'xcrun', 'actool', '--output-format', 'human-readable-text', + '--compress-pngs', '--notices', '--warnings', '--errors', + ] + is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ + if is_iphone_target: + platform = os.environ['CONFIGURATION'].split('-')[-1] + if platform not in ('iphoneos', 'iphonesimulator'): + platform = 'iphonesimulator' + command_line.extend([ + '--platform', platform, '--target-device', 'iphone', + '--target-device', 'ipad', '--minimum-deployment-target', + os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', + os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), + ]) + else: + command_line.extend([ + '--platform', 'macosx', '--target-device', 'mac', + '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], + '--compile', + os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), + ]) + if keys: + keys = json.loads(keys) + for key, value in keys.items(): + arg_name = '--' + key + if isinstance(value, bool): + if value: + command_line.append(arg_name) + elif isinstance(value, list): + for v in value: + command_line.append(arg_name) + command_line.append(str(v)) + else: + command_line.append(arg_name) + command_line.append(str(value)) + # Note: actool crashes if inputs path are relative, so use os.path.abspath + # to get absolute path name for inputs. + command_line.extend(map(os.path.abspath, inputs)) + subprocess.check_call(command_line) + + def ExecMergeInfoPlist(self, output, *inputs): + """Merge multiple .plist files into a single .plist file.""" + merged_plist = {} + for path in inputs: + plist = self._LoadPlistMaybeBinary(path) + self._MergePlist(merged_plist, plist) + plistlib.writePlist(merged_plist, output) + + def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + """Code sign a bundle. + + This function tries to code sign an iOS bundle, following the same + algorithm as Xcode: + 1. copy ResourceRules.plist from the user or the SDK into the bundle, + 2. pick the provisioning profile that best match the bundle identifier, + and copy it into the bundle as embedded.mobileprovision, + 3. copy Entitlements.plist from user or SDK next to the bundle, + 4. code sign the bundle. + """ + resource_rules_path = self._InstallResourceRules(resource_rules) + substitutions, overrides = self._InstallProvisioningProfile( + provisioning, self._GetCFBundleIdentifier()) + entitlements_path = self._InstallEntitlements( + entitlements, substitutions, overrides) + subprocess.check_call([ + 'codesign', '--force', '--sign', key, '--resource-rules', + resource_rules_path, '--entitlements', entitlements_path, + os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['FULL_PRODUCT_NAME'])]) + + def _InstallResourceRules(self, resource_rules): + """Installs ResourceRules.plist from user or SDK into the bundle. + + Args: + resource_rules: string, optional, path to the ResourceRules.plist file + to use, default to "${SDKROOT}/ResourceRules.plist" + + Returns: + Path to the copy of ResourceRules.plist into the bundle. + """ + source_path = resource_rules + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'ResourceRules.plist') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], 'ResourceRules.plist') + shutil.copy2(source_path, target_path) + return target_path + + def _InstallProvisioningProfile(self, profile, bundle_identifier): + """Installs embedded.mobileprovision into the bundle. 
+ + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple containing two dictionary: variables substitutions and values + to overrides when generating the entitlements file. + """ + source_path, provisioning_data, team_id = self._FindProvisioningProfile( + profile, bundle_identifier) + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'embedded.mobileprovision') + shutil.copy2(source_path, target_path) + substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') + return substitutions, provisioning_data['Entitlements'] + + def _FindProvisioningProfile(self, profile, bundle_identifier): + """Finds the .mobileprovision file to use for signing the bundle. + + Checks all the installed provisioning profiles (or if the user specified + the PROVISIONING_PROFILE variable, only consult it) and select the most + specific that correspond to the bundle identifier. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple of the path to the selected provisioning profile, the data of + the embedded plist in the provisioning profile and the team identifier + to use for code signing. + + Raises: + SystemExit: if no .mobileprovision can be used to sign the bundle. + """ + profiles_dir = os.path.join( + os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') + if not os.path.isdir(profiles_dir): + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + sys.exit(1) + provisioning_profiles = None + if profile: + profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') + if os.path.exists(profile_path): + provisioning_profiles = [profile_path] + if not provisioning_profiles: + provisioning_profiles = glob.glob( + os.path.join(profiles_dir, '*.mobileprovision')) + valid_provisioning_profiles = {} + for profile_path in provisioning_profiles: + profile_data = self._LoadProvisioningProfile(profile_path) + app_id_pattern = profile_data.get( + 'Entitlements', {}).get('application-identifier', '') + for team_identifier in profile_data.get('TeamIdentifier', []): + app_id = '%s.%s' % (team_identifier, bundle_identifier) + if fnmatch.fnmatch(app_id, app_id_pattern): + valid_provisioning_profiles[app_id_pattern] = ( + profile_path, profile_data, team_identifier) + if not valid_provisioning_profiles: + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + sys.exit(1) + # If the user has multiple provisioning profiles installed that can be + # used for ${bundle_identifier}, pick the most specific one (ie. the + # provisioning profile whose pattern is the longest). + selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) + return valid_provisioning_profiles[selected_key] + + def _LoadProvisioningProfile(self, profile_path): + """Extracts the plist embedded in a provisioning profile. + + Args: + profile_path: string, path to the .mobileprovision file + + Returns: + Content of the plist embedded in the provisioning profile as a dictionary. 
+ """ + with tempfile.NamedTemporaryFile() as temp: + subprocess.check_call([ + 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) + return self._LoadPlistMaybeBinary(temp.name) + + def _MergePlist(self, merged_plist, plist): + """Merge |plist| into |merged_plist|.""" + for key, value in plist.items(): + if isinstance(value, dict): + merged_value = merged_plist.get(key, {}) + if isinstance(merged_value, dict): + self._MergePlist(merged_value, value) + merged_plist[key] = merged_value + else: + merged_plist[key] = value + else: + merged_plist[key] = value + + def _LoadPlistMaybeBinary(self, plist_path): + """Loads into a memory a plist possibly encoded in binary format. + + This is a wrapper around plistlib.readPlist that tries to convert the + plist to the XML format if it can't be parsed (assuming that it is in + the binary format). + + Args: + plist_path: string, path to a plist file, in XML or binary format + + Returns: + Content of the plist as a dictionary. + """ + try: + # First, try to read the file using plistlib that only supports XML, + # and if an exception is raised, convert a temporary copy to XML and + # load that copy. + return plistlib.readPlist(plist_path) + except: + pass + with tempfile.NamedTemporaryFile() as temp: + shutil.copy2(plist_path, temp.name) + subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) + return plistlib.readPlist(temp.name) + + def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): + """Constructs a dictionary of variable substitutions for Entitlements.plist. + + Args: + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + app_identifier_prefix: string, value for AppIdentifierPrefix + + Returns: + Dictionary of substitutions to apply when generating Entitlements.plist. + """ + return { + 'CFBundleIdentifier': bundle_identifier, + 'AppIdentifierPrefix': app_identifier_prefix, + } + + def _GetCFBundleIdentifier(self): + """Extracts CFBundleIdentifier value from Info.plist in the bundle. + + Returns: + Value of CFBundleIdentifier in the Info.plist located in the bundle. + """ + info_plist_path = os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['INFOPLIST_PATH']) + info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) + return info_plist_data['CFBundleIdentifier'] + + def _InstallEntitlements(self, entitlements, substitutions, overrides): + """Generates and install the ${BundleName}.xcent entitlements file. + + Expands variables "$(variable)" pattern in the source entitlements file, + add extra entitlements defined in the .mobileprovision file and the copy + the generated plist to "${BundlePath}.xcent". + + Args: + entitlements: string, optional, path to the Entitlements.plist template + to use, defaults to "${SDKROOT}/Entitlements.plist" + substitutions: dictionary, variable substitutions + overrides: dictionary, values to add to the entitlements + + Returns: + Path to the generated entitlements file. 
+ """ + source_path = entitlements + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['PRODUCT_NAME'] + '.xcent') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], + 'Entitlements.plist') + shutil.copy2(source_path, target_path) + data = self._LoadPlistMaybeBinary(target_path) + data = self._ExpandVariables(data, substitutions) + if overrides: + for key in overrides: + if key not in data: + data[key] = overrides[key] + plistlib.writePlist(data, target_path) + return target_path + + def _ExpandVariables(self, data, substitutions): + """Expands variables "$(variable)" in data. + + Args: + data: object, can be either string, list or dictionary + substitutions: dictionary, variable substitutions to perform + + Returns: + Copy of data where each references to "$(variable)" has been replaced + by the corresponding value found in substitutions, or left intact if + the key was not found. + """ + if isinstance(data, str): + for key, value in substitutions.items(): + data = data.replace('$(%s)' % key, value) + return data + if isinstance(data, list): + return [self._ExpandVariables(v, substitutions) for v in data] + if isinstance(data, dict): + return {k: self._ExpandVariables(data[k], substitutions) for k in data} + return data + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/node_modules/mongodb/node_modules/bson/build_browser.js b/node_modules/bson/build_browser.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/build_browser.js rename to node_modules/bson/build_browser.js diff --git a/node_modules/mongodb/node_modules/bson/ext/Makefile b/node_modules/bson/ext/Makefile similarity index 100% rename from node_modules/mongodb/node_modules/bson/ext/Makefile rename to node_modules/bson/ext/Makefile diff --git a/node_modules/mongodb/node_modules/bson/ext/bson.cc b/node_modules/bson/ext/bson.cc similarity index 97% rename from node_modules/mongodb/node_modules/bson/ext/bson.cc rename to node_modules/bson/ext/bson.cc index 422fc3ee6..1ec303163 100644 --- a/node_modules/mongodb/node_modules/bson/ext/bson.cc +++ b/node_modules/bson/ext/bson.cc @@ -1,1045 +1,1045 @@ -//=========================================================================== - -#include -#include -#include -#include -#include - -#ifdef __clang__ -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunused-parameter" -#endif - -#include - -// this and the above block must be around the v8.h header otherwise -// v8 is not happy -#ifdef __clang__ -#pragma clang diagnostic pop -#endif - -#include -#include -#include - -#include -#include -#include -#include - -#ifdef __sun - #include -#endif - -#include "bson.h" - -using namespace v8; -using namespace node; - -//=========================================================================== - -void DataStream::WriteObjectId(const Handle& object, const Handle& key) -{ - uint16_t buffer[12]; - object->Get(key)->ToString()->Write(buffer, 0, 12); - for(uint32_t i = 0; i < 12; ++i) - { - *p++ = (char) buffer[i]; - } -} - -void ThrowAllocatedStringException(size_t allocationSize, const char* format, ...) 
-{ - va_list args; - va_start(args, format); - char* string = (char*) malloc(allocationSize); - vsprintf(string, format, args); - va_end(args); - - throw string; -} - -void DataStream::CheckKey(const Local& keyName) -{ - size_t keyLength = keyName->Utf8Length(); - if(keyLength == 0) return; - - // Allocate space for the key, do not need to zero terminate as WriteUtf8 does it - char* keyStringBuffer = (char*) alloca(keyLength + 1); - // Write the key to the allocated buffer - keyName->WriteUtf8(keyStringBuffer); - // Check for the zero terminator - char* terminator = strchr(keyStringBuffer, 0x00); - - // If the location is not at the end of the string we've got an illegal 0x00 byte somewhere - if(terminator != &keyStringBuffer[keyLength]) { - ThrowAllocatedStringException(64+keyLength, "key %s must not contain null bytes", keyStringBuffer); - } - - if(keyStringBuffer[0] == '$') - { - ThrowAllocatedStringException(64+keyLength, "key %s must not start with '$'", keyStringBuffer); - } - - if(strchr(keyStringBuffer, '.') != NULL) - { - ThrowAllocatedStringException(64+keyLength, "key %s must not contain '.'", keyStringBuffer); - } -} - -template void BSONSerializer::SerializeDocument(const Handle& value) -{ - void* documentSize = this->BeginWriteSize(); - Local object = bson->GetSerializeObject(value); - - // Get the object property names - #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 6 - Local propertyNames = object->GetPropertyNames(); - #else - Local propertyNames = object->GetOwnPropertyNames(); - #endif - - // Length of the property - int propertyLength = propertyNames->Length(); - for(int i = 0; i < propertyLength; ++i) - { - const Local& propertyName = propertyNames->Get(i)->ToString(); - if(checkKeys) this->CheckKey(propertyName); - - const Local& propertyValue = object->Get(propertyName); - - if(serializeFunctions || !propertyValue->IsFunction()) - { - void* typeLocation = this->BeginWriteType(); - this->WriteString(propertyName); - SerializeValue(typeLocation, propertyValue); - } - } - - this->WriteByte(0); - this->CommitSize(documentSize); -} - -template void BSONSerializer::SerializeArray(const Handle& value) -{ - void* documentSize = this->BeginWriteSize(); - - Local array = Local::Cast(value->ToObject()); - uint32_t arrayLength = array->Length(); - - for(uint32_t i = 0; i < arrayLength; ++i) - { - void* typeLocation = this->BeginWriteType(); - this->WriteUInt32String(i); - SerializeValue(typeLocation, array->Get(i)); - } - - this->WriteByte(0); - this->CommitSize(documentSize); -} - -// This is templated so that we can use this function to both count the number of bytes, and to serialize those bytes. -// The template approach eliminates almost all of the inspection of values unless they're required (eg. string lengths) -// and ensures that there is always consistency between bytes counted and bytes written by design. 
-template void BSONSerializer::SerializeValue(void* typeLocation, const Handle& value) -{ - if(value->IsNumber()) - { - double doubleValue = value->NumberValue(); - int intValue = (int) doubleValue; - if(intValue == doubleValue) - { - this->CommitType(typeLocation, BSON_TYPE_INT); - this->WriteInt32(intValue); - } - else - { - this->CommitType(typeLocation, BSON_TYPE_NUMBER); - this->WriteDouble(doubleValue); - } - } - else if(value->IsString()) - { - this->CommitType(typeLocation, BSON_TYPE_STRING); - this->WriteLengthPrefixedString(value->ToString()); - } - else if(value->IsBoolean()) - { - this->CommitType(typeLocation, BSON_TYPE_BOOLEAN); - this->WriteBool(value); - } - else if(value->IsArray()) - { - this->CommitType(typeLocation, BSON_TYPE_ARRAY); - SerializeArray(value); - } - else if(value->IsDate()) - { - this->CommitType(typeLocation, BSON_TYPE_DATE); - this->WriteInt64(value); - } - else if(value->IsRegExp()) - { - this->CommitType(typeLocation, BSON_TYPE_REGEXP); - const Handle& regExp = Handle::Cast(value); - - this->WriteString(regExp->GetSource()); - - int flags = regExp->GetFlags(); - if(flags & RegExp::kGlobal) this->WriteByte('s'); - if(flags & RegExp::kIgnoreCase) this->WriteByte('i'); - if(flags & RegExp::kMultiline) this->WriteByte('m'); - this->WriteByte(0); - } - else if(value->IsFunction()) - { - this->CommitType(typeLocation, BSON_TYPE_CODE); - this->WriteLengthPrefixedString(value->ToString()); - } - else if(value->IsObject()) - { - const Local& object = value->ToObject(); - if(object->Has(NanPersistentToLocal(bson->_bsontypeString))) - { - const Local& constructorString = object->GetConstructorName(); - if(NanPersistentToLocal(bson->longString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_LONG); - this->WriteInt32(object, NanPersistentToLocal(bson->_longLowString)); - this->WriteInt32(object, NanPersistentToLocal(bson->_longHighString)); - } - else if(NanPersistentToLocal(bson->timestampString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_TIMESTAMP); - this->WriteInt32(object, NanPersistentToLocal(bson->_longLowString)); - this->WriteInt32(object, NanPersistentToLocal(bson->_longHighString)); - } - else if(NanPersistentToLocal(bson->objectIDString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_OID); - this->WriteObjectId(object, NanPersistentToLocal(bson->_objectIDidString)); - } - else if(NanPersistentToLocal(bson->binaryString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_BINARY); - - uint32_t length = object->Get(NanPersistentToLocal(bson->_binaryPositionString))->Uint32Value(); - Local bufferObj = object->Get(NanPersistentToLocal(bson->_binaryBufferString))->ToObject(); - - this->WriteInt32(length); - this->WriteByte(object, NanPersistentToLocal(bson->_binarySubTypeString)); // write subtype - // If type 0x02 write the array length aswell - if(object->Get(NanPersistentToLocal(bson->_binarySubTypeString))->Int32Value() == 0x02) { - this->WriteInt32(length); - } - // Write the actual data - this->WriteData(Buffer::Data(bufferObj), length); - } - else if(NanPersistentToLocal(bson->doubleString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_NUMBER); - this->WriteDouble(object, NanPersistentToLocal(bson->_doubleValueString)); - } - else if(NanPersistentToLocal(bson->symbolString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_SYMBOL); - 
this->WriteLengthPrefixedString(object->Get(NanPersistentToLocal(bson->_symbolValueString))->ToString()); - } - else if(NanPersistentToLocal(bson->codeString)->StrictEquals(constructorString)) - { - const Local& function = object->Get(NanPersistentToLocal(bson->_codeCodeString))->ToString(); - const Local& scope = object->Get(NanPersistentToLocal(bson->_codeScopeString))->ToObject(); - - // For Node < 0.6.X use the GetPropertyNames - #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 6 - uint32_t propertyNameLength = scope->GetPropertyNames()->Length(); - #else - uint32_t propertyNameLength = scope->GetOwnPropertyNames()->Length(); - #endif - - if(propertyNameLength > 0) - { - this->CommitType(typeLocation, BSON_TYPE_CODE_W_SCOPE); - void* codeWidthScopeSize = this->BeginWriteSize(); - this->WriteLengthPrefixedString(function->ToString()); - SerializeDocument(scope); - this->CommitSize(codeWidthScopeSize); - } - else - { - this->CommitType(typeLocation, BSON_TYPE_CODE); - this->WriteLengthPrefixedString(function->ToString()); - } - } - else if(NanPersistentToLocal(bson->dbrefString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_OBJECT); - - void* dbRefSize = this->BeginWriteSize(); - - void* refType = this->BeginWriteType(); - this->WriteData("$ref", 5); - SerializeValue(refType, object->Get(NanPersistentToLocal(bson->_dbRefNamespaceString))); - - void* idType = this->BeginWriteType(); - this->WriteData("$id", 4); - SerializeValue(idType, object->Get(NanPersistentToLocal(bson->_dbRefOidString))); - - const Local& refDbValue = object->Get(NanPersistentToLocal(bson->_dbRefDbString)); - if(!refDbValue->IsUndefined()) - { - void* dbType = this->BeginWriteType(); - this->WriteData("$db", 4); - SerializeValue(dbType, refDbValue); - } - - this->WriteByte(0); - this->CommitSize(dbRefSize); - } - else if(NanPersistentToLocal(bson->minKeyString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_MIN_KEY); - } - else if(NanPersistentToLocal(bson->maxKeyString)->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_MAX_KEY); - } - } - else if(Buffer::HasInstance(value)) - { - this->CommitType(typeLocation, BSON_TYPE_BINARY); - - #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 - Local buffer = ObjectWrap::Unwrap(value->ToObject()); - uint32_t length = object->length(); - #else - uint32_t length = Buffer::Length(value->ToObject()); - #endif - - this->WriteInt32(length); - this->WriteByte(0); - this->WriteData(Buffer::Data(value->ToObject()), length); - } - else - { - this->CommitType(typeLocation, BSON_TYPE_OBJECT); - SerializeDocument(value); - } - } - else if(value->IsNull() || value->IsUndefined()) - { - this->CommitType(typeLocation, BSON_TYPE_NULL); - } -} - -// Data points to start of element list, length is length of entire document including '\0' but excluding initial size -BSONDeserializer::BSONDeserializer(BSON* aBson, char* data, size_t length) -: bson(aBson), - pStart(data), - p(data), - pEnd(data + length - 1) -{ - if(*pEnd != '\0') ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); -} - -BSONDeserializer::BSONDeserializer(BSONDeserializer& parentSerializer, size_t length) -: bson(parentSerializer.bson), - pStart(parentSerializer.p), - p(parentSerializer.p), - pEnd(parentSerializer.p + length - 1) -{ - parentSerializer.p += length; - if(pEnd > parentSerializer.pEnd) ThrowAllocatedStringException(64, "Child document exceeds parent's bounds"); - if(*pEnd != '\0') 
ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); -} - -Handle BSONDeserializer::ReadCString() -{ - char* start = p; - while(*p++ && (p < pEnd)) { } - if(p > pEnd) { - return Null(); - } - return String::New(start, (int32_t) (p-start-1) ); -} - -int32_t BSONDeserializer::ReadRegexOptions() -{ - int32_t options = 0; - for(;;) - { - switch(*p++) - { - case '\0': return options; - case 's': options |= RegExp::kGlobal; break; - case 'i': options |= RegExp::kIgnoreCase; break; - case 'm': options |= RegExp::kMultiline; break; - } - } -} - -uint32_t BSONDeserializer::ReadIntegerString() -{ - uint32_t value = 0; - while(*p) - { - if(*p < '0' || *p > '9') ThrowAllocatedStringException(64, "Invalid key for array"); - value = value * 10 + *p++ - '0'; - } - ++p; - return value; -} - -Local BSONDeserializer::ReadString() -{ - uint32_t length = ReadUInt32(); - char* start = p; - p += length; - return String::New(start, length-1); -} - -Local BSONDeserializer::ReadObjectId() -{ - uint16_t objectId[12]; - for(size_t i = 0; i < 12; ++i) - { - objectId[i] = *reinterpret_cast(p++); - } - return String::New(objectId, 12); -} - -Handle BSONDeserializer::DeserializeDocument(bool promoteLongs) -{ - uint32_t length = ReadUInt32(); - if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Document is less than 5 bytes"); - - BSONDeserializer documentDeserializer(*this, length-4); - return documentDeserializer.DeserializeDocumentInternal(promoteLongs); -} - -Handle BSONDeserializer::DeserializeDocumentInternal(bool promoteLongs) -{ - Local returnObject = Object::New(); - - while(HasMoreData()) - { - BsonType type = (BsonType) ReadByte(); - const Handle& name = ReadCString(); - if(name->IsNull()) ThrowAllocatedStringException(64, "Bad BSON Document: illegal CString"); - // name->Is - const Handle& value = DeserializeValue(type, promoteLongs); - returnObject->ForceSet(name, value); - } - if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Document: Serialize consumed unexpected number of bytes"); - - // From JavaScript: - // if(object['$id'] != null) object = new DBRef(object['$ref'], object['$id'], object['$db']); - if(returnObject->Has(NanPersistentToLocal(bson->_dbRefIdRefString))) - { - Local argv[] = { returnObject->Get(NanPersistentToLocal(bson->_dbRefRefString)), returnObject->Get(NanPersistentToLocal(bson->_dbRefIdRefString)), returnObject->Get(NanPersistentToLocal(bson->_dbRefDbRefString)) }; - return NanPersistentToLocal(bson->dbrefConstructor)->NewInstance(3, argv); - } - else - { - return returnObject; - } -} - -Handle BSONDeserializer::DeserializeArray(bool promoteLongs) -{ - uint32_t length = ReadUInt32(); - if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Array Document is less than 5 bytes"); - - BSONDeserializer documentDeserializer(*this, length-4); - return documentDeserializer.DeserializeArrayInternal(promoteLongs); -} - -Handle BSONDeserializer::DeserializeArrayInternal(bool promoteLongs) -{ - Local returnArray = Array::New(); - - while(HasMoreData()) - { - BsonType type = (BsonType) ReadByte(); - uint32_t index = ReadIntegerString(); - const Handle& value = DeserializeValue(type, promoteLongs); - returnArray->Set(index, value); - } - if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Array: Serialize consumed unexpected number of bytes"); - - return returnArray; -} - -Handle BSONDeserializer::DeserializeValue(BsonType type, bool promoteLongs) -{ - switch(type) - { - case BSON_TYPE_STRING: - return ReadString(); - - case BSON_TYPE_INT: - 
return Integer::New(ReadInt32()); - - case BSON_TYPE_NUMBER: - return Number::New(ReadDouble()); - - case BSON_TYPE_NULL: - return Null(); - - case BSON_TYPE_UNDEFINED: - return Undefined(); - - case BSON_TYPE_TIMESTAMP: - { - int32_t lowBits = ReadInt32(); - int32_t highBits = ReadInt32(); - Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; - return NanPersistentToLocal(bson->timestampConstructor)->NewInstance(2, argv); - } - - case BSON_TYPE_BOOLEAN: - return (ReadByte() != 0) ? True() : False(); - - case BSON_TYPE_REGEXP: - { - const Handle& regex = ReadCString(); - if(regex->IsNull()) ThrowAllocatedStringException(64, "Bad BSON Document: illegal CString"); - int32_t options = ReadRegexOptions(); - return RegExp::New(regex->ToString(), (RegExp::Flags) options); - } - - case BSON_TYPE_CODE: - { - const Local& code = ReadString(); - const Local& scope = Object::New(); - Local argv[] = { code, scope }; - return NanPersistentToLocal(bson->codeConstructor)->NewInstance(2, argv); - } - - case BSON_TYPE_CODE_W_SCOPE: - { - ReadUInt32(); - const Local& code = ReadString(); - const Handle& scope = DeserializeDocument(promoteLongs); - Local argv[] = { code, scope->ToObject() }; - return NanPersistentToLocal(bson->codeConstructor)->NewInstance(2, argv); - } - - case BSON_TYPE_OID: - { - Local argv[] = { ReadObjectId() }; - return NanPersistentToLocal(bson->objectIDConstructor)->NewInstance(1, argv); - } - - case BSON_TYPE_BINARY: - { - uint32_t length = ReadUInt32(); - uint32_t subType = ReadByte(); - if(subType == 0x02) { - length = ReadInt32(); - } - - Local buffer = NanNewBufferHandle(p, length); - p += length; - - Handle argv[] = { buffer, Uint32::New(subType) }; - return NanPersistentToLocal(bson->binaryConstructor)->NewInstance(2, argv); - } - - case BSON_TYPE_LONG: - { - // Read 32 bit integers - int32_t lowBits = (int32_t) ReadInt32(); - int32_t highBits = (int32_t) ReadInt32(); - - // Promote long is enabled - if(promoteLongs) { - // If value is < 2^53 and >-2^53 - if((highBits < 0x200000 || (highBits == 0x200000 && lowBits == 0)) && highBits >= -0x200000) { - // Adjust the pointer and read as 64 bit value - p -= 8; - // Read the 64 bit value - int64_t finalValue = (int64_t) ReadInt64(); - return Number::New(finalValue); - } - } - - // Decode the Long value - Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; - return NanPersistentToLocal(bson->longConstructor)->NewInstance(2, argv); - } - - case BSON_TYPE_DATE: - return Date::New((double) ReadInt64()); - - case BSON_TYPE_ARRAY: - return DeserializeArray(promoteLongs); - - case BSON_TYPE_OBJECT: - return DeserializeDocument(promoteLongs); - - case BSON_TYPE_SYMBOL: - { - const Local& string = ReadString(); - Local argv[] = { string }; - return NanPersistentToLocal(bson->symbolConstructor)->NewInstance(1, argv); - } - - case BSON_TYPE_MIN_KEY: - return NanPersistentToLocal(bson->minKeyConstructor)->NewInstance(); - - case BSON_TYPE_MAX_KEY: - return NanPersistentToLocal(bson->maxKeyConstructor)->NewInstance(); - - default: - ThrowAllocatedStringException(64, "Unhandled BSON Type: %d", type); - } - - return v8::Null(); -} - -Persistent BSON::constructor_template; - -BSON::BSON() : ObjectWrap() -{ - // Setup pre-allocated comparision objects - NanAssignPersistent(String, _bsontypeString, String::New("_bsontype")); - NanAssignPersistent(String, _longLowString, String::New("low_")); - NanAssignPersistent(String, _longHighString, String::New("high_")); - NanAssignPersistent(String, _objectIDidString, String::New("id")); - 
NanAssignPersistent(String, _binaryPositionString, String::New("position")); - NanAssignPersistent(String, _binarySubTypeString, String::New("sub_type")); - NanAssignPersistent(String, _binaryBufferString, String::New("buffer")); - NanAssignPersistent(String, _doubleValueString, String::New("value")); - NanAssignPersistent(String, _symbolValueString, String::New("value")); - NanAssignPersistent(String, _dbRefRefString, String::New("$ref")); - NanAssignPersistent(String, _dbRefIdRefString, String::New("$id")); - NanAssignPersistent(String, _dbRefDbRefString, String::New("$db")); - NanAssignPersistent(String, _dbRefNamespaceString, String::New("namespace")); - NanAssignPersistent(String, _dbRefDbString, String::New("db")); - NanAssignPersistent(String, _dbRefOidString, String::New("oid")); - NanAssignPersistent(String, _codeCodeString, String::New("code")); - NanAssignPersistent(String, _codeScopeString, String::New("scope")); - NanAssignPersistent(String, _toBSONString, String::New("toBSON")); - - NanAssignPersistent(String, longString, String::New("Long")); - NanAssignPersistent(String, objectIDString, String::New("ObjectID")); - NanAssignPersistent(String, binaryString, String::New("Binary")); - NanAssignPersistent(String, codeString, String::New("Code")); - NanAssignPersistent(String, dbrefString, String::New("DBRef")); - NanAssignPersistent(String, symbolString, String::New("Symbol")); - NanAssignPersistent(String, doubleString, String::New("Double")); - NanAssignPersistent(String, timestampString, String::New("Timestamp")); - NanAssignPersistent(String, minKeyString, String::New("MinKey")); - NanAssignPersistent(String, maxKeyString, String::New("MaxKey")); -} - -void BSON::Initialize(v8::Handle target) -{ - // Grab the scope of the call from Node - NanScope(); - // Define a new function template - Local t = FunctionTemplate::New(New); - t->InstanceTemplate()->SetInternalFieldCount(1); - t->SetClassName(String::NewSymbol("BSON")); - - // Instance methods - NODE_SET_PROTOTYPE_METHOD(t, "calculateObjectSize", CalculateObjectSize); - NODE_SET_PROTOTYPE_METHOD(t, "serialize", BSONSerialize); - NODE_SET_PROTOTYPE_METHOD(t, "serializeWithBufferAndIndex", SerializeWithBufferAndIndex); - NODE_SET_PROTOTYPE_METHOD(t, "deserialize", BSONDeserialize); - NODE_SET_PROTOTYPE_METHOD(t, "deserializeStream", BSONDeserializeStream); - - NanAssignPersistent(FunctionTemplate, constructor_template, t); - - target->ForceSet(String::NewSymbol("BSON"), t->GetFunction()); -} - -// Create a new instance of BSON and passing it the existing context -NAN_METHOD(BSON::New) -{ - NanScope(); - - // Check that we have an array - if(args.Length() == 1 && args[0]->IsArray()) - { - // Cast the array to a local reference - Local array = Local::Cast(args[0]); - - if(array->Length() > 0) - { - // Create a bson object instance and return it - BSON *bson = new BSON(); - - uint32_t foundClassesMask = 0; - - // Iterate over all entries to save the instantiate funtions - for(uint32_t i = 0; i < array->Length(); i++) { - // Let's get a reference to the function - Local func = Local::Cast(array->Get(i)); - Local functionName = func->GetName()->ToString(); - - // Save the functions making them persistant handles (they don't get collected) - if(functionName->StrictEquals(NanPersistentToLocal(bson->longString))) { - NanAssignPersistent(Function, bson->longConstructor, func); - foundClassesMask |= 1; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->objectIDString))) { - NanAssignPersistent(Function, 
bson->objectIDConstructor, func); - foundClassesMask |= 2; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->binaryString))) { - NanAssignPersistent(Function, bson->binaryConstructor, func); - foundClassesMask |= 4; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->codeString))) { - NanAssignPersistent(Function, bson->codeConstructor, func); - foundClassesMask |= 8; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->dbrefString))) { - NanAssignPersistent(Function, bson->dbrefConstructor, func); - foundClassesMask |= 0x10; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->symbolString))) { - NanAssignPersistent(Function, bson->symbolConstructor, func); - foundClassesMask |= 0x20; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->doubleString))) { - NanAssignPersistent(Function, bson->doubleConstructor, func); - foundClassesMask |= 0x40; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->timestampString))) { - NanAssignPersistent(Function, bson->timestampConstructor, func); - foundClassesMask |= 0x80; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->minKeyString))) { - NanAssignPersistent(Function, bson->minKeyConstructor, func); - foundClassesMask |= 0x100; - } else if(functionName->StrictEquals(NanPersistentToLocal(bson->maxKeyString))) { - NanAssignPersistent(Function, bson->maxKeyConstructor, func); - foundClassesMask |= 0x200; - } - } - - // Check if we have the right number of constructors otherwise throw an error - if(foundClassesMask != 0x3ff) { - delete bson; - return NanThrowError("Missing function constructor for either [Long/ObjectID/Binary/Code/DbRef/Symbol/Double/Timestamp/MinKey/MaxKey]"); - } else { - bson->Wrap(args.This()); - NanReturnValue(args.This()); - } - } - else - { - return NanThrowError("No types passed in"); - } - } - else - { - return NanThrowTypeError("Argument passed in must be an array of types"); - } -} - -//------------------------------------------------------------------------------------------------ -//------------------------------------------------------------------------------------------------ -//------------------------------------------------------------------------------------------------ -//------------------------------------------------------------------------------------------------ - -NAN_METHOD(BSON::BSONDeserialize) -{ - NanScope(); - - // Fail if the first argument is not a string or a buffer - if(args.Length() > 1 && !args[0]->IsString() && !Buffer::HasInstance(args[0])) - return NanThrowError("First Argument must be a Buffer or String."); - - // Promote longs - bool promoteLongs = true; - - // If we have an options object - if(args.Length() == 2 && args[1]->IsObject()) { - Local options = args[1]->ToObject(); - - if(options->Has(String::New("promoteLongs"))) { - promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); - } - } - - // Define pointer to data - Local obj = args[0]->ToObject(); - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - - // If we passed in a buffer, let's unpack it, otherwise let's unpack the string - if(Buffer::HasInstance(obj)) - { -#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 - Local buffer = ObjectWrap::Unwrap(obj); - char* data = buffer->data(); - size_t length = buffer->length(); -#else - char* data = Buffer::Data(obj); - size_t length = Buffer::Length(obj); -#endif - - // Validate that we have at least 5 bytes - if(length < 5) return 
NanThrowError("corrupt bson message < 5 bytes long"); - - try - { - BSONDeserializer deserializer(bson, data, length); - // deserializer.promoteLongs = promoteLongs; - NanReturnValue(deserializer.DeserializeDocument(promoteLongs)); - } - catch(char* exception) - { - Local error = String::New(exception); - free(exception); - return NanThrowError(error); - } - - } - else - { - // The length of the data for this encoding - ssize_t len = DecodeBytes(args[0], BINARY); - - // Validate that we have at least 5 bytes - if(len < 5) return NanThrowError("corrupt bson message < 5 bytes long"); - - // Let's define the buffer size - char* data = (char *)malloc(len); - DecodeWrite(data, len, args[0], BINARY); - - try - { - BSONDeserializer deserializer(bson, data, len); - // deserializer.promoteLongs = promoteLongs; - Handle result = deserializer.DeserializeDocument(promoteLongs); - free(data); - NanReturnValue(result); - - } - catch(char* exception) - { - Local error = String::New(exception); - free(exception); - free(data); - return NanThrowError(error); - } - } -} - -Local BSON::GetSerializeObject(const Handle& argValue) -{ - Local object = argValue->ToObject(); - if(object->Has(NanPersistentToLocal(_toBSONString))) - { - const Local& toBSON = object->Get(NanPersistentToLocal(_toBSONString)); - if(!toBSON->IsFunction()) ThrowAllocatedStringException(64, "toBSON is not a function"); - - Local result = Local::Cast(toBSON)->Call(object, 0, NULL); - if(!result->IsObject()) ThrowAllocatedStringException(64, "toBSON function did not return an object"); - return result->ToObject(); - } - else - { - return object; - } -} - -NAN_METHOD(BSON::BSONSerialize) -{ - NanScope(); - - if(args.Length() == 1 && !args[0]->IsObject()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); - if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); - if(args.Length() == 3 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); - if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean() && !args[3]->IsBoolean()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); - if(args.Length() > 4) return NanThrowError("One, two, tree or four arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); - - // Check if we have an array as the object - if(args[0]->IsArray()) return NanThrowError("Only javascript objects supported"); - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - - // Calculate the total size of the document in binary form to ensure we only allocate memory once - // With serialize function - bool serializeFunctions = (args.Length() >= 4) && args[3]->BooleanValue(); - - char *serialized_object = NULL; - size_t object_size; - try - { - Local object = bson->GetSerializeObject(args[0]); - - BSONSerializer counter(bson, false, serializeFunctions); - counter.SerializeDocument(object); - object_size = counter.GetSerializeSize(); - - // Allocate the memory needed for the serialization - serialized_object = (char *)malloc(object_size); - - // 
Check if we have a boolean value - bool checkKeys = args.Length() >= 3 && args[1]->IsBoolean() && args[1]->BooleanValue(); - BSONSerializer data(bson, checkKeys, serializeFunctions, serialized_object); - data.SerializeDocument(object); - } - catch(char *err_msg) - { - free(serialized_object); - Local error = String::New(err_msg); - free(err_msg); - return NanThrowError(error); - } - - // If we have 3 arguments - if(args.Length() == 3 || args.Length() == 4) - { - Local buffer = NanNewBufferHandle(serialized_object, object_size); - free(serialized_object); - NanReturnValue(buffer); - } - else - { - Local bin_value = Encode(serialized_object, object_size, BINARY)->ToString(); - free(serialized_object); - NanReturnValue(bin_value); - } -} - -NAN_METHOD(BSON::CalculateObjectSize) -{ - NanScope(); - // Ensure we have a valid object - if(args.Length() == 1 && !args[0]->IsObject()) return NanThrowError("One argument required - [object]"); - if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return NanThrowError("Two arguments required - [object, boolean]"); - if(args.Length() > 3) return NanThrowError("One or two arguments required - [object] or [object, boolean]"); - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - bool serializeFunctions = (args.Length() >= 2) && args[1]->BooleanValue(); - BSONSerializer countSerializer(bson, false, serializeFunctions); - countSerializer.SerializeDocument(args[0]); - - // Return the object size - NanReturnValue(Uint32::New((uint32_t) countSerializer.GetSerializeSize())); -} - -NAN_METHOD(BSON::SerializeWithBufferAndIndex) -{ - NanScope(); - - //BSON.serializeWithBufferAndIndex = function serializeWithBufferAndIndex(object, ->, buffer, index) { - // Ensure we have the correct values - if(args.Length() > 5) return NanThrowError("Four or five parameters required [object, boolean, Buffer, int] or [object, boolean, Buffer, int, boolean]"); - if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32()) return NanThrowError("Four parameters required [object, boolean, Buffer, int]"); - if(args.Length() == 5 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32() && !args[4]->IsBoolean()) return NanThrowError("Four parameters required [object, boolean, Buffer, int, boolean]"); - - uint32_t index; - size_t object_size; - - try - { - BSON *bson = ObjectWrap::Unwrap(args.This()); - - Local obj = args[2]->ToObject(); - char* data = Buffer::Data(obj); - size_t length = Buffer::Length(obj); - - index = args[3]->Uint32Value(); - bool checkKeys = args.Length() >= 4 && args[1]->IsBoolean() && args[1]->BooleanValue(); - bool serializeFunctions = (args.Length() == 5) && args[4]->BooleanValue(); - - BSONSerializer dataSerializer(bson, checkKeys, serializeFunctions, data+index); - dataSerializer.SerializeDocument(bson->GetSerializeObject(args[0])); - object_size = dataSerializer.GetSerializeSize(); - - if(object_size + index > length) return NanThrowError("Serious error - overflowed buffer!!"); - } - catch(char *exception) - { - Local error = String::New(exception); - free(exception); - return NanThrowError(error); - } - - NanReturnValue(Uint32::New((uint32_t) (index + object_size - 1))); -} - -NAN_METHOD(BSON::BSONDeserializeStream) -{ - NanScope(); - - // At least 3 arguments required - if(args.Length() < 5) return NanThrowError("Arguments required (Buffer(data), Number(index in data), Number(number of 
documents to deserialize), Array(results), Number(index in the array), Object(optional))"); - - // If the number of argumets equals 3 - if(args.Length() >= 5) - { - if(!Buffer::HasInstance(args[0])) return NanThrowError("First argument must be Buffer instance"); - if(!args[1]->IsUint32()) return NanThrowError("Second argument must be a positive index number"); - if(!args[2]->IsUint32()) return NanThrowError("Third argument must be a positive number of documents to deserialize"); - if(!args[3]->IsArray()) return NanThrowError("Fourth argument must be an array the size of documents to deserialize"); - if(!args[4]->IsUint32()) return NanThrowError("Sixth argument must be a positive index number"); - } - - // If we have 4 arguments - if(args.Length() == 6 && !args[5]->IsObject()) return NanThrowError("Fifth argument must be an object with options"); - - // Define pointer to data - Local obj = args[0]->ToObject(); - uint32_t numberOfDocuments = args[2]->Uint32Value(); - uint32_t index = args[1]->Uint32Value(); - uint32_t resultIndex = args[4]->Uint32Value(); - bool promoteLongs = true; - - // Check for the value promoteLongs in the options object - if(args.Length() == 6) { - Local options = args[5]->ToObject(); - - // Check if we have the promoteLong variable - if(options->Has(String::New("promoteLongs"))) { - promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); - } - } - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - - // Unpack the buffer variable -#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 - Local buffer = ObjectWrap::Unwrap(obj); - char* data = buffer->data(); - size_t length = buffer->length(); -#else - char* data = Buffer::Data(obj); - size_t length = Buffer::Length(obj); -#endif - - // Fetch the documents - Local documents = args[3]->ToObject(); - - BSONDeserializer deserializer(bson, data+index, length-index); - for(uint32_t i = 0; i < numberOfDocuments; i++) - { - try - { - documents->Set(i + resultIndex, deserializer.DeserializeDocument(promoteLongs)); - } - catch (char* exception) - { - Local error = String::New(exception); - free(exception); - return NanThrowError(error); - } - } - - // Return new index of parsing - NanReturnValue(Uint32::New((uint32_t) (index + deserializer.GetSerializeSize()))); -} - -// Exporting function -extern "C" void init(Handle target) -{ - NanScope(); - BSON::Initialize(target); -} - -NODE_MODULE(bson, BSON::Initialize); +//=========================================================================== + +#include +#include +#include +#include +#include + +#ifdef __clang__ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wunused-parameter" +#endif + +#include + +// this and the above block must be around the v8.h header otherwise +// v8 is not happy +#ifdef __clang__ +#pragma clang diagnostic pop +#endif + +#include +#include +#include + +#include +#include +#include +#include + +#ifdef __sun + #include +#endif + +#include "bson.h" + +using namespace v8; +using namespace node; + +//=========================================================================== + +void DataStream::WriteObjectId(const Handle& object, const Handle& key) +{ + uint16_t buffer[12]; + object->Get(key)->ToString()->Write(buffer, 0, 12); + for(uint32_t i = 0; i < 12; ++i) + { + *p++ = (char) buffer[i]; + } +} + +void ThrowAllocatedStringException(size_t allocationSize, const char* format, ...) 
+{ + va_list args; + va_start(args, format); + char* string = (char*) malloc(allocationSize); + vsprintf(string, format, args); + va_end(args); + + throw string; +} + +void DataStream::CheckKey(const Local& keyName) +{ + size_t keyLength = keyName->Utf8Length(); + if(keyLength == 0) return; + + // Allocate space for the key, do not need to zero terminate as WriteUtf8 does it + char* keyStringBuffer = (char*) alloca(keyLength + 1); + // Write the key to the allocated buffer + keyName->WriteUtf8(keyStringBuffer); + // Check for the zero terminator + char* terminator = strchr(keyStringBuffer, 0x00); + + // If the location is not at the end of the string we've got an illegal 0x00 byte somewhere + if(terminator != &keyStringBuffer[keyLength]) { + ThrowAllocatedStringException(64+keyLength, "key %s must not contain null bytes", keyStringBuffer); + } + + if(keyStringBuffer[0] == '$') + { + ThrowAllocatedStringException(64+keyLength, "key %s must not start with '$'", keyStringBuffer); + } + + if(strchr(keyStringBuffer, '.') != NULL) + { + ThrowAllocatedStringException(64+keyLength, "key %s must not contain '.'", keyStringBuffer); + } +} + +template void BSONSerializer::SerializeDocument(const Handle& value) +{ + void* documentSize = this->BeginWriteSize(); + Local object = bson->GetSerializeObject(value); + + // Get the object property names + #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 6 + Local propertyNames = object->GetPropertyNames(); + #else + Local propertyNames = object->GetOwnPropertyNames(); + #endif + + // Length of the property + int propertyLength = propertyNames->Length(); + for(int i = 0; i < propertyLength; ++i) + { + const Local& propertyName = propertyNames->Get(i)->ToString(); + if(checkKeys) this->CheckKey(propertyName); + + const Local& propertyValue = object->Get(propertyName); + + if(serializeFunctions || !propertyValue->IsFunction()) + { + void* typeLocation = this->BeginWriteType(); + this->WriteString(propertyName); + SerializeValue(typeLocation, propertyValue); + } + } + + this->WriteByte(0); + this->CommitSize(documentSize); +} + +template void BSONSerializer::SerializeArray(const Handle& value) +{ + void* documentSize = this->BeginWriteSize(); + + Local array = Local::Cast(value->ToObject()); + uint32_t arrayLength = array->Length(); + + for(uint32_t i = 0; i < arrayLength; ++i) + { + void* typeLocation = this->BeginWriteType(); + this->WriteUInt32String(i); + SerializeValue(typeLocation, array->Get(i)); + } + + this->WriteByte(0); + this->CommitSize(documentSize); +} + +// This is templated so that we can use this function to both count the number of bytes, and to serialize those bytes. +// The template approach eliminates almost all of the inspection of values unless they're required (eg. string lengths) +// and ensures that there is always consistency between bytes counted and bytes written by design. 
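The comment above describes the two-pass design used throughout this file: a counting stream first measures exactly how many bytes a document will occupy, and a writing stream then emits the same field sequence into a buffer of exactly that size. As a rough, hedged sketch only (it is not part of the patch; CountingStream, WritingStream, SerializeExample and SerializeOnce are simplified stand-ins for the real CountStream, DataStream and BSONSerializer templates), the pattern looks like this:

    #include <cstdint>
    #include <cstring>
    #include <string>
    #include <vector>

    // Pass 1: only count how many bytes each write would take.
    struct CountingStream {
        std::size_t count = 0;
        void WriteInt32(int32_t) { count += 4; }
        void WriteString(const std::string& s) { count += s.size() + 1; } // payload + NUL
    };

    // Pass 2: emit the same bytes into a preallocated buffer.
    struct WritingStream {
        explicit WritingStream(char* dest) : p(dest) {}
        void WriteInt32(int32_t v) { std::memcpy(p, &v, sizeof v); p += sizeof v; }
        void WriteString(const std::string& s) { std::memcpy(p, s.c_str(), s.size() + 1); p += s.size() + 1; }
        char* p;
    };

    // One templated routine drives both passes, so the size counted in pass 1
    // cannot drift from the bytes written in pass 2.
    template <typename Stream>
    void SerializeExample(Stream& out, int32_t id, const std::string& name) {
        out.WriteInt32(id);
        out.WriteString(name);
    }

    std::vector<char> SerializeOnce(int32_t id, const std::string& name) {
        CountingStream counter;
        SerializeExample(counter, id, name);      // measure
        std::vector<char> buffer(counter.count);  // allocate exactly once
        WritingStream writer(buffer.data());
        SerializeExample(writer, id, name);       // write
        return buffer;
    }

This mirrors how BSON::BSONSerialize runs a BSONSerializer over a counting stream to obtain GetSerializeSize(), allocates the output buffer once, and then reruns the same serializer over a DataStream pointing at that allocation.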
+template void BSONSerializer::SerializeValue(void* typeLocation, const Handle& value) +{ + if(value->IsNumber()) + { + double doubleValue = value->NumberValue(); + int intValue = (int) doubleValue; + if(intValue == doubleValue) + { + this->CommitType(typeLocation, BSON_TYPE_INT); + this->WriteInt32(intValue); + } + else + { + this->CommitType(typeLocation, BSON_TYPE_NUMBER); + this->WriteDouble(doubleValue); + } + } + else if(value->IsString()) + { + this->CommitType(typeLocation, BSON_TYPE_STRING); + this->WriteLengthPrefixedString(value->ToString()); + } + else if(value->IsBoolean()) + { + this->CommitType(typeLocation, BSON_TYPE_BOOLEAN); + this->WriteBool(value); + } + else if(value->IsArray()) + { + this->CommitType(typeLocation, BSON_TYPE_ARRAY); + SerializeArray(value); + } + else if(value->IsDate()) + { + this->CommitType(typeLocation, BSON_TYPE_DATE); + this->WriteInt64(value); + } + else if(value->IsRegExp()) + { + this->CommitType(typeLocation, BSON_TYPE_REGEXP); + const Handle& regExp = Handle::Cast(value); + + this->WriteString(regExp->GetSource()); + + int flags = regExp->GetFlags(); + if(flags & RegExp::kGlobal) this->WriteByte('s'); + if(flags & RegExp::kIgnoreCase) this->WriteByte('i'); + if(flags & RegExp::kMultiline) this->WriteByte('m'); + this->WriteByte(0); + } + else if(value->IsFunction()) + { + this->CommitType(typeLocation, BSON_TYPE_CODE); + this->WriteLengthPrefixedString(value->ToString()); + } + else if(value->IsObject()) + { + const Local& object = value->ToObject(); + if(object->Has(NanPersistentToLocal(bson->_bsontypeString))) + { + const Local& constructorString = object->GetConstructorName(); + if(NanPersistentToLocal(bson->longString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_LONG); + this->WriteInt32(object, NanPersistentToLocal(bson->_longLowString)); + this->WriteInt32(object, NanPersistentToLocal(bson->_longHighString)); + } + else if(NanPersistentToLocal(bson->timestampString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_TIMESTAMP); + this->WriteInt32(object, NanPersistentToLocal(bson->_longLowString)); + this->WriteInt32(object, NanPersistentToLocal(bson->_longHighString)); + } + else if(NanPersistentToLocal(bson->objectIDString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_OID); + this->WriteObjectId(object, NanPersistentToLocal(bson->_objectIDidString)); + } + else if(NanPersistentToLocal(bson->binaryString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_BINARY); + + uint32_t length = object->Get(NanPersistentToLocal(bson->_binaryPositionString))->Uint32Value(); + Local bufferObj = object->Get(NanPersistentToLocal(bson->_binaryBufferString))->ToObject(); + + this->WriteInt32(length); + this->WriteByte(object, NanPersistentToLocal(bson->_binarySubTypeString)); // write subtype + // If type 0x02 write the array length aswell + if(object->Get(NanPersistentToLocal(bson->_binarySubTypeString))->Int32Value() == 0x02) { + this->WriteInt32(length); + } + // Write the actual data + this->WriteData(Buffer::Data(bufferObj), length); + } + else if(NanPersistentToLocal(bson->doubleString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_NUMBER); + this->WriteDouble(object, NanPersistentToLocal(bson->_doubleValueString)); + } + else if(NanPersistentToLocal(bson->symbolString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_SYMBOL); + 
this->WriteLengthPrefixedString(object->Get(NanPersistentToLocal(bson->_symbolValueString))->ToString()); + } + else if(NanPersistentToLocal(bson->codeString)->StrictEquals(constructorString)) + { + const Local& function = object->Get(NanPersistentToLocal(bson->_codeCodeString))->ToString(); + const Local& scope = object->Get(NanPersistentToLocal(bson->_codeScopeString))->ToObject(); + + // For Node < 0.6.X use the GetPropertyNames + #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 6 + uint32_t propertyNameLength = scope->GetPropertyNames()->Length(); + #else + uint32_t propertyNameLength = scope->GetOwnPropertyNames()->Length(); + #endif + + if(propertyNameLength > 0) + { + this->CommitType(typeLocation, BSON_TYPE_CODE_W_SCOPE); + void* codeWidthScopeSize = this->BeginWriteSize(); + this->WriteLengthPrefixedString(function->ToString()); + SerializeDocument(scope); + this->CommitSize(codeWidthScopeSize); + } + else + { + this->CommitType(typeLocation, BSON_TYPE_CODE); + this->WriteLengthPrefixedString(function->ToString()); + } + } + else if(NanPersistentToLocal(bson->dbrefString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_OBJECT); + + void* dbRefSize = this->BeginWriteSize(); + + void* refType = this->BeginWriteType(); + this->WriteData("$ref", 5); + SerializeValue(refType, object->Get(NanPersistentToLocal(bson->_dbRefNamespaceString))); + + void* idType = this->BeginWriteType(); + this->WriteData("$id", 4); + SerializeValue(idType, object->Get(NanPersistentToLocal(bson->_dbRefOidString))); + + const Local& refDbValue = object->Get(NanPersistentToLocal(bson->_dbRefDbString)); + if(!refDbValue->IsUndefined()) + { + void* dbType = this->BeginWriteType(); + this->WriteData("$db", 4); + SerializeValue(dbType, refDbValue); + } + + this->WriteByte(0); + this->CommitSize(dbRefSize); + } + else if(NanPersistentToLocal(bson->minKeyString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_MIN_KEY); + } + else if(NanPersistentToLocal(bson->maxKeyString)->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_MAX_KEY); + } + } + else if(Buffer::HasInstance(value)) + { + this->CommitType(typeLocation, BSON_TYPE_BINARY); + + #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 + Local buffer = ObjectWrap::Unwrap(value->ToObject()); + uint32_t length = object->length(); + #else + uint32_t length = Buffer::Length(value->ToObject()); + #endif + + this->WriteInt32(length); + this->WriteByte(0); + this->WriteData(Buffer::Data(value->ToObject()), length); + } + else + { + this->CommitType(typeLocation, BSON_TYPE_OBJECT); + SerializeDocument(value); + } + } + else if(value->IsNull() || value->IsUndefined()) + { + this->CommitType(typeLocation, BSON_TYPE_NULL); + } +} + +// Data points to start of element list, length is length of entire document including '\0' but excluding initial size +BSONDeserializer::BSONDeserializer(BSON* aBson, char* data, size_t length) +: bson(aBson), + pStart(data), + p(data), + pEnd(data + length - 1) +{ + if(*pEnd != '\0') ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); +} + +BSONDeserializer::BSONDeserializer(BSONDeserializer& parentSerializer, size_t length) +: bson(parentSerializer.bson), + pStart(parentSerializer.p), + p(parentSerializer.p), + pEnd(parentSerializer.p + length - 1) +{ + parentSerializer.p += length; + if(pEnd > parentSerializer.pEnd) ThrowAllocatedStringException(64, "Child document exceeds parent's bounds"); + if(*pEnd != '\0') 
ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); +} + +Handle BSONDeserializer::ReadCString() +{ + char* start = p; + while(*p++ && (p < pEnd)) { } + if(p > pEnd) { + return Null(); + } + return String::New(start, (int32_t) (p-start-1) ); +} + +int32_t BSONDeserializer::ReadRegexOptions() +{ + int32_t options = 0; + for(;;) + { + switch(*p++) + { + case '\0': return options; + case 's': options |= RegExp::kGlobal; break; + case 'i': options |= RegExp::kIgnoreCase; break; + case 'm': options |= RegExp::kMultiline; break; + } + } +} + +uint32_t BSONDeserializer::ReadIntegerString() +{ + uint32_t value = 0; + while(*p) + { + if(*p < '0' || *p > '9') ThrowAllocatedStringException(64, "Invalid key for array"); + value = value * 10 + *p++ - '0'; + } + ++p; + return value; +} + +Local BSONDeserializer::ReadString() +{ + uint32_t length = ReadUInt32(); + char* start = p; + p += length; + return String::New(start, length-1); +} + +Local BSONDeserializer::ReadObjectId() +{ + uint16_t objectId[12]; + for(size_t i = 0; i < 12; ++i) + { + objectId[i] = *reinterpret_cast(p++); + } + return String::New(objectId, 12); +} + +Handle BSONDeserializer::DeserializeDocument(bool promoteLongs) +{ + uint32_t length = ReadUInt32(); + if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Document is less than 5 bytes"); + + BSONDeserializer documentDeserializer(*this, length-4); + return documentDeserializer.DeserializeDocumentInternal(promoteLongs); +} + +Handle BSONDeserializer::DeserializeDocumentInternal(bool promoteLongs) +{ + Local returnObject = Object::New(); + + while(HasMoreData()) + { + BsonType type = (BsonType) ReadByte(); + const Handle& name = ReadCString(); + if(name->IsNull()) ThrowAllocatedStringException(64, "Bad BSON Document: illegal CString"); + // name->Is + const Handle& value = DeserializeValue(type, promoteLongs); + returnObject->ForceSet(name, value); + } + if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Document: Serialize consumed unexpected number of bytes"); + + // From JavaScript: + // if(object['$id'] != null) object = new DBRef(object['$ref'], object['$id'], object['$db']); + if(returnObject->Has(NanPersistentToLocal(bson->_dbRefIdRefString))) + { + Local argv[] = { returnObject->Get(NanPersistentToLocal(bson->_dbRefRefString)), returnObject->Get(NanPersistentToLocal(bson->_dbRefIdRefString)), returnObject->Get(NanPersistentToLocal(bson->_dbRefDbRefString)) }; + return NanPersistentToLocal(bson->dbrefConstructor)->NewInstance(3, argv); + } + else + { + return returnObject; + } +} + +Handle BSONDeserializer::DeserializeArray(bool promoteLongs) +{ + uint32_t length = ReadUInt32(); + if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Array Document is less than 5 bytes"); + + BSONDeserializer documentDeserializer(*this, length-4); + return documentDeserializer.DeserializeArrayInternal(promoteLongs); +} + +Handle BSONDeserializer::DeserializeArrayInternal(bool promoteLongs) +{ + Local returnArray = Array::New(); + + while(HasMoreData()) + { + BsonType type = (BsonType) ReadByte(); + uint32_t index = ReadIntegerString(); + const Handle& value = DeserializeValue(type, promoteLongs); + returnArray->Set(index, value); + } + if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Array: Serialize consumed unexpected number of bytes"); + + return returnArray; +} + +Handle BSONDeserializer::DeserializeValue(BsonType type, bool promoteLongs) +{ + switch(type) + { + case BSON_TYPE_STRING: + return ReadString(); + + case BSON_TYPE_INT: + 
return Integer::New(ReadInt32()); + + case BSON_TYPE_NUMBER: + return Number::New(ReadDouble()); + + case BSON_TYPE_NULL: + return Null(); + + case BSON_TYPE_UNDEFINED: + return Undefined(); + + case BSON_TYPE_TIMESTAMP: + { + int32_t lowBits = ReadInt32(); + int32_t highBits = ReadInt32(); + Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; + return NanPersistentToLocal(bson->timestampConstructor)->NewInstance(2, argv); + } + + case BSON_TYPE_BOOLEAN: + return (ReadByte() != 0) ? True() : False(); + + case BSON_TYPE_REGEXP: + { + const Handle& regex = ReadCString(); + if(regex->IsNull()) ThrowAllocatedStringException(64, "Bad BSON Document: illegal CString"); + int32_t options = ReadRegexOptions(); + return RegExp::New(regex->ToString(), (RegExp::Flags) options); + } + + case BSON_TYPE_CODE: + { + const Local& code = ReadString(); + const Local& scope = Object::New(); + Local argv[] = { code, scope }; + return NanPersistentToLocal(bson->codeConstructor)->NewInstance(2, argv); + } + + case BSON_TYPE_CODE_W_SCOPE: + { + ReadUInt32(); + const Local& code = ReadString(); + const Handle& scope = DeserializeDocument(promoteLongs); + Local argv[] = { code, scope->ToObject() }; + return NanPersistentToLocal(bson->codeConstructor)->NewInstance(2, argv); + } + + case BSON_TYPE_OID: + { + Local argv[] = { ReadObjectId() }; + return NanPersistentToLocal(bson->objectIDConstructor)->NewInstance(1, argv); + } + + case BSON_TYPE_BINARY: + { + uint32_t length = ReadUInt32(); + uint32_t subType = ReadByte(); + if(subType == 0x02) { + length = ReadInt32(); + } + + Local buffer = NanNewBufferHandle(p, length); + p += length; + + Handle argv[] = { buffer, Uint32::New(subType) }; + return NanPersistentToLocal(bson->binaryConstructor)->NewInstance(2, argv); + } + + case BSON_TYPE_LONG: + { + // Read 32 bit integers + int32_t lowBits = (int32_t) ReadInt32(); + int32_t highBits = (int32_t) ReadInt32(); + + // Promote long is enabled + if(promoteLongs) { + // If value is < 2^53 and >-2^53 + if((highBits < 0x200000 || (highBits == 0x200000 && lowBits == 0)) && highBits >= -0x200000) { + // Adjust the pointer and read as 64 bit value + p -= 8; + // Read the 64 bit value + int64_t finalValue = (int64_t) ReadInt64(); + return Number::New(finalValue); + } + } + + // Decode the Long value + Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; + return NanPersistentToLocal(bson->longConstructor)->NewInstance(2, argv); + } + + case BSON_TYPE_DATE: + return Date::New((double) ReadInt64()); + + case BSON_TYPE_ARRAY: + return DeserializeArray(promoteLongs); + + case BSON_TYPE_OBJECT: + return DeserializeDocument(promoteLongs); + + case BSON_TYPE_SYMBOL: + { + const Local& string = ReadString(); + Local argv[] = { string }; + return NanPersistentToLocal(bson->symbolConstructor)->NewInstance(1, argv); + } + + case BSON_TYPE_MIN_KEY: + return NanPersistentToLocal(bson->minKeyConstructor)->NewInstance(); + + case BSON_TYPE_MAX_KEY: + return NanPersistentToLocal(bson->maxKeyConstructor)->NewInstance(); + + default: + ThrowAllocatedStringException(64, "Unhandled BSON Type: %d", type); + } + + return v8::Null(); +} + +Persistent BSON::constructor_template; + +BSON::BSON() : ObjectWrap() +{ + // Setup pre-allocated comparision objects + NanAssignPersistent(String, _bsontypeString, String::New("_bsontype")); + NanAssignPersistent(String, _longLowString, String::New("low_")); + NanAssignPersistent(String, _longHighString, String::New("high_")); + NanAssignPersistent(String, _objectIDidString, String::New("id")); + 
NanAssignPersistent(String, _binaryPositionString, String::New("position")); + NanAssignPersistent(String, _binarySubTypeString, String::New("sub_type")); + NanAssignPersistent(String, _binaryBufferString, String::New("buffer")); + NanAssignPersistent(String, _doubleValueString, String::New("value")); + NanAssignPersistent(String, _symbolValueString, String::New("value")); + NanAssignPersistent(String, _dbRefRefString, String::New("$ref")); + NanAssignPersistent(String, _dbRefIdRefString, String::New("$id")); + NanAssignPersistent(String, _dbRefDbRefString, String::New("$db")); + NanAssignPersistent(String, _dbRefNamespaceString, String::New("namespace")); + NanAssignPersistent(String, _dbRefDbString, String::New("db")); + NanAssignPersistent(String, _dbRefOidString, String::New("oid")); + NanAssignPersistent(String, _codeCodeString, String::New("code")); + NanAssignPersistent(String, _codeScopeString, String::New("scope")); + NanAssignPersistent(String, _toBSONString, String::New("toBSON")); + + NanAssignPersistent(String, longString, String::New("Long")); + NanAssignPersistent(String, objectIDString, String::New("ObjectID")); + NanAssignPersistent(String, binaryString, String::New("Binary")); + NanAssignPersistent(String, codeString, String::New("Code")); + NanAssignPersistent(String, dbrefString, String::New("DBRef")); + NanAssignPersistent(String, symbolString, String::New("Symbol")); + NanAssignPersistent(String, doubleString, String::New("Double")); + NanAssignPersistent(String, timestampString, String::New("Timestamp")); + NanAssignPersistent(String, minKeyString, String::New("MinKey")); + NanAssignPersistent(String, maxKeyString, String::New("MaxKey")); +} + +void BSON::Initialize(v8::Handle target) +{ + // Grab the scope of the call from Node + NanScope(); + // Define a new function template + Local t = FunctionTemplate::New(New); + t->InstanceTemplate()->SetInternalFieldCount(1); + t->SetClassName(String::NewSymbol("BSON")); + + // Instance methods + NODE_SET_PROTOTYPE_METHOD(t, "calculateObjectSize", CalculateObjectSize); + NODE_SET_PROTOTYPE_METHOD(t, "serialize", BSONSerialize); + NODE_SET_PROTOTYPE_METHOD(t, "serializeWithBufferAndIndex", SerializeWithBufferAndIndex); + NODE_SET_PROTOTYPE_METHOD(t, "deserialize", BSONDeserialize); + NODE_SET_PROTOTYPE_METHOD(t, "deserializeStream", BSONDeserializeStream); + + NanAssignPersistent(FunctionTemplate, constructor_template, t); + + target->ForceSet(String::NewSymbol("BSON"), t->GetFunction()); +} + +// Create a new instance of BSON and passing it the existing context +NAN_METHOD(BSON::New) +{ + NanScope(); + + // Check that we have an array + if(args.Length() == 1 && args[0]->IsArray()) + { + // Cast the array to a local reference + Local array = Local::Cast(args[0]); + + if(array->Length() > 0) + { + // Create a bson object instance and return it + BSON *bson = new BSON(); + + uint32_t foundClassesMask = 0; + + // Iterate over all entries to save the instantiate funtions + for(uint32_t i = 0; i < array->Length(); i++) { + // Let's get a reference to the function + Local func = Local::Cast(array->Get(i)); + Local functionName = func->GetName()->ToString(); + + // Save the functions making them persistant handles (they don't get collected) + if(functionName->StrictEquals(NanPersistentToLocal(bson->longString))) { + NanAssignPersistent(Function, bson->longConstructor, func); + foundClassesMask |= 1; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->objectIDString))) { + NanAssignPersistent(Function, 
bson->objectIDConstructor, func); + foundClassesMask |= 2; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->binaryString))) { + NanAssignPersistent(Function, bson->binaryConstructor, func); + foundClassesMask |= 4; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->codeString))) { + NanAssignPersistent(Function, bson->codeConstructor, func); + foundClassesMask |= 8; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->dbrefString))) { + NanAssignPersistent(Function, bson->dbrefConstructor, func); + foundClassesMask |= 0x10; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->symbolString))) { + NanAssignPersistent(Function, bson->symbolConstructor, func); + foundClassesMask |= 0x20; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->doubleString))) { + NanAssignPersistent(Function, bson->doubleConstructor, func); + foundClassesMask |= 0x40; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->timestampString))) { + NanAssignPersistent(Function, bson->timestampConstructor, func); + foundClassesMask |= 0x80; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->minKeyString))) { + NanAssignPersistent(Function, bson->minKeyConstructor, func); + foundClassesMask |= 0x100; + } else if(functionName->StrictEquals(NanPersistentToLocal(bson->maxKeyString))) { + NanAssignPersistent(Function, bson->maxKeyConstructor, func); + foundClassesMask |= 0x200; + } + } + + // Check if we have the right number of constructors otherwise throw an error + if(foundClassesMask != 0x3ff) { + delete bson; + return NanThrowError("Missing function constructor for either [Long/ObjectID/Binary/Code/DbRef/Symbol/Double/Timestamp/MinKey/MaxKey]"); + } else { + bson->Wrap(args.This()); + NanReturnValue(args.This()); + } + } + else + { + return NanThrowError("No types passed in"); + } + } + else + { + return NanThrowTypeError("Argument passed in must be an array of types"); + } +} + +//------------------------------------------------------------------------------------------------ +//------------------------------------------------------------------------------------------------ +//------------------------------------------------------------------------------------------------ +//------------------------------------------------------------------------------------------------ + +NAN_METHOD(BSON::BSONDeserialize) +{ + NanScope(); + + // Fail if the first argument is not a string or a buffer + if(args.Length() > 1 && !args[0]->IsString() && !Buffer::HasInstance(args[0])) + return NanThrowError("First Argument must be a Buffer or String."); + + // Promote longs + bool promoteLongs = true; + + // If we have an options object + if(args.Length() == 2 && args[1]->IsObject()) { + Local options = args[1]->ToObject(); + + if(options->Has(String::New("promoteLongs"))) { + promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); + } + } + + // Define pointer to data + Local obj = args[0]->ToObject(); + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + + // If we passed in a buffer, let's unpack it, otherwise let's unpack the string + if(Buffer::HasInstance(obj)) + { +#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 + Local buffer = ObjectWrap::Unwrap(obj); + char* data = buffer->data(); + size_t length = buffer->length(); +#else + char* data = Buffer::Data(obj); + size_t length = Buffer::Length(obj); +#endif + + // Validate that we have at least 5 bytes + if(length < 5) return 
NanThrowError("corrupt bson message < 5 bytes long"); + + try + { + BSONDeserializer deserializer(bson, data, length); + // deserializer.promoteLongs = promoteLongs; + NanReturnValue(deserializer.DeserializeDocument(promoteLongs)); + } + catch(char* exception) + { + Local error = String::New(exception); + free(exception); + return NanThrowError(error); + } + + } + else + { + // The length of the data for this encoding + ssize_t len = DecodeBytes(args[0], BINARY); + + // Validate that we have at least 5 bytes + if(len < 5) return NanThrowError("corrupt bson message < 5 bytes long"); + + // Let's define the buffer size + char* data = (char *)malloc(len); + DecodeWrite(data, len, args[0], BINARY); + + try + { + BSONDeserializer deserializer(bson, data, len); + // deserializer.promoteLongs = promoteLongs; + Handle result = deserializer.DeserializeDocument(promoteLongs); + free(data); + NanReturnValue(result); + + } + catch(char* exception) + { + Local error = String::New(exception); + free(exception); + free(data); + return NanThrowError(error); + } + } +} + +Local BSON::GetSerializeObject(const Handle& argValue) +{ + Local object = argValue->ToObject(); + if(object->Has(NanPersistentToLocal(_toBSONString))) + { + const Local& toBSON = object->Get(NanPersistentToLocal(_toBSONString)); + if(!toBSON->IsFunction()) ThrowAllocatedStringException(64, "toBSON is not a function"); + + Local result = Local::Cast(toBSON)->Call(object, 0, NULL); + if(!result->IsObject()) ThrowAllocatedStringException(64, "toBSON function did not return an object"); + return result->ToObject(); + } + else + { + return object; + } +} + +NAN_METHOD(BSON::BSONSerialize) +{ + NanScope(); + + if(args.Length() == 1 && !args[0]->IsObject()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); + if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); + if(args.Length() == 3 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); + if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean() && !args[3]->IsBoolean()) return NanThrowError("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); + if(args.Length() > 4) return NanThrowError("One, two, tree or four arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); + + // Check if we have an array as the object + if(args[0]->IsArray()) return NanThrowError("Only javascript objects supported"); + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + + // Calculate the total size of the document in binary form to ensure we only allocate memory once + // With serialize function + bool serializeFunctions = (args.Length() >= 4) && args[3]->BooleanValue(); + + char *serialized_object = NULL; + size_t object_size; + try + { + Local object = bson->GetSerializeObject(args[0]); + + BSONSerializer counter(bson, false, serializeFunctions); + counter.SerializeDocument(object); + object_size = counter.GetSerializeSize(); + + // Allocate the memory needed for the serialization + serialized_object = (char *)malloc(object_size); + + // 
Check if we have a boolean value + bool checkKeys = args.Length() >= 3 && args[1]->IsBoolean() && args[1]->BooleanValue(); + BSONSerializer data(bson, checkKeys, serializeFunctions, serialized_object); + data.SerializeDocument(object); + } + catch(char *err_msg) + { + free(serialized_object); + Local error = String::New(err_msg); + free(err_msg); + return NanThrowError(error); + } + + // If we have 3 arguments + if(args.Length() == 3 || args.Length() == 4) + { + Local buffer = NanNewBufferHandle(serialized_object, object_size); + free(serialized_object); + NanReturnValue(buffer); + } + else + { + Local bin_value = Encode(serialized_object, object_size, BINARY)->ToString(); + free(serialized_object); + NanReturnValue(bin_value); + } +} + +NAN_METHOD(BSON::CalculateObjectSize) +{ + NanScope(); + // Ensure we have a valid object + if(args.Length() == 1 && !args[0]->IsObject()) return NanThrowError("One argument required - [object]"); + if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return NanThrowError("Two arguments required - [object, boolean]"); + if(args.Length() > 3) return NanThrowError("One or two arguments required - [object] or [object, boolean]"); + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + bool serializeFunctions = (args.Length() >= 2) && args[1]->BooleanValue(); + BSONSerializer countSerializer(bson, false, serializeFunctions); + countSerializer.SerializeDocument(args[0]); + + // Return the object size + NanReturnValue(Uint32::New((uint32_t) countSerializer.GetSerializeSize())); +} + +NAN_METHOD(BSON::SerializeWithBufferAndIndex) +{ + NanScope(); + + //BSON.serializeWithBufferAndIndex = function serializeWithBufferAndIndex(object, ->, buffer, index) { + // Ensure we have the correct values + if(args.Length() > 5) return NanThrowError("Four or five parameters required [object, boolean, Buffer, int] or [object, boolean, Buffer, int, boolean]"); + if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32()) return NanThrowError("Four parameters required [object, boolean, Buffer, int]"); + if(args.Length() == 5 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32() && !args[4]->IsBoolean()) return NanThrowError("Four parameters required [object, boolean, Buffer, int, boolean]"); + + uint32_t index; + size_t object_size; + + try + { + BSON *bson = ObjectWrap::Unwrap(args.This()); + + Local obj = args[2]->ToObject(); + char* data = Buffer::Data(obj); + size_t length = Buffer::Length(obj); + + index = args[3]->Uint32Value(); + bool checkKeys = args.Length() >= 4 && args[1]->IsBoolean() && args[1]->BooleanValue(); + bool serializeFunctions = (args.Length() == 5) && args[4]->BooleanValue(); + + BSONSerializer dataSerializer(bson, checkKeys, serializeFunctions, data+index); + dataSerializer.SerializeDocument(bson->GetSerializeObject(args[0])); + object_size = dataSerializer.GetSerializeSize(); + + if(object_size + index > length) return NanThrowError("Serious error - overflowed buffer!!"); + } + catch(char *exception) + { + Local error = String::New(exception); + free(exception); + return NanThrowError(error); + } + + NanReturnValue(Uint32::New((uint32_t) (index + object_size - 1))); +} + +NAN_METHOD(BSON::BSONDeserializeStream) +{ + NanScope(); + + // At least 3 arguments required + if(args.Length() < 5) return NanThrowError("Arguments required (Buffer(data), Number(index in data), Number(number of 
documents to deserialize), Array(results), Number(index in the array), Object(optional))"); + + // If the number of argumets equals 3 + if(args.Length() >= 5) + { + if(!Buffer::HasInstance(args[0])) return NanThrowError("First argument must be Buffer instance"); + if(!args[1]->IsUint32()) return NanThrowError("Second argument must be a positive index number"); + if(!args[2]->IsUint32()) return NanThrowError("Third argument must be a positive number of documents to deserialize"); + if(!args[3]->IsArray()) return NanThrowError("Fourth argument must be an array the size of documents to deserialize"); + if(!args[4]->IsUint32()) return NanThrowError("Sixth argument must be a positive index number"); + } + + // If we have 4 arguments + if(args.Length() == 6 && !args[5]->IsObject()) return NanThrowError("Fifth argument must be an object with options"); + + // Define pointer to data + Local obj = args[0]->ToObject(); + uint32_t numberOfDocuments = args[2]->Uint32Value(); + uint32_t index = args[1]->Uint32Value(); + uint32_t resultIndex = args[4]->Uint32Value(); + bool promoteLongs = true; + + // Check for the value promoteLongs in the options object + if(args.Length() == 6) { + Local options = args[5]->ToObject(); + + // Check if we have the promoteLong variable + if(options->Has(String::New("promoteLongs"))) { + promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); + } + } + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + + // Unpack the buffer variable +#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 + Local buffer = ObjectWrap::Unwrap(obj); + char* data = buffer->data(); + size_t length = buffer->length(); +#else + char* data = Buffer::Data(obj); + size_t length = Buffer::Length(obj); +#endif + + // Fetch the documents + Local documents = args[3]->ToObject(); + + BSONDeserializer deserializer(bson, data+index, length-index); + for(uint32_t i = 0; i < numberOfDocuments; i++) + { + try + { + documents->Set(i + resultIndex, deserializer.DeserializeDocument(promoteLongs)); + } + catch (char* exception) + { + Local error = String::New(exception); + free(exception); + return NanThrowError(error); + } + } + + // Return new index of parsing + NanReturnValue(Uint32::New((uint32_t) (index + deserializer.GetSerializeSize()))); +} + +// Exporting function +extern "C" void init(Handle target) +{ + NanScope(); + BSON::Initialize(target); +} + +NODE_MODULE(bson, BSON::Initialize); diff --git a/node_modules/mongodb/node_modules/bson/ext/bson.h b/node_modules/bson/ext/bson.h similarity index 97% rename from node_modules/mongodb/node_modules/bson/ext/bson.h rename to node_modules/bson/ext/bson.h index dad8412ac..13f0cc469 100644 --- a/node_modules/mongodb/node_modules/bson/ext/bson.h +++ b/node_modules/bson/ext/bson.h @@ -1,278 +1,278 @@ -//=========================================================================== - -#ifndef BSON_H_ -#define BSON_H_ - -//=========================================================================== - -#ifdef __arm__ -#define USE_MISALIGNED_MEMORY_ACCESS 0 -#else -#define USE_MISALIGNED_MEMORY_ACCESS 1 -#endif - -#include -#include -#include -#include "nan.h" - -using namespace v8; -using namespace node; - -//=========================================================================== - -enum BsonType -{ - BSON_TYPE_NUMBER = 1, - BSON_TYPE_STRING = 2, - BSON_TYPE_OBJECT = 3, - BSON_TYPE_ARRAY = 4, - BSON_TYPE_BINARY = 5, - BSON_TYPE_UNDEFINED = 6, - BSON_TYPE_OID = 7, - BSON_TYPE_BOOLEAN = 8, - BSON_TYPE_DATE = 
9, - BSON_TYPE_NULL = 10, - BSON_TYPE_REGEXP = 11, - BSON_TYPE_CODE = 13, - BSON_TYPE_SYMBOL = 14, - BSON_TYPE_CODE_W_SCOPE = 15, - BSON_TYPE_INT = 16, - BSON_TYPE_TIMESTAMP = 17, - BSON_TYPE_LONG = 18, - BSON_TYPE_MAX_KEY = 0x7f, - BSON_TYPE_MIN_KEY = 0xff -}; - -//=========================================================================== - -template class BSONSerializer; - -class BSON : public ObjectWrap { -public: - BSON(); - ~BSON() {} - - static void Initialize(Handle target); - static NAN_METHOD(BSONDeserializeStream); - - // JS based objects - static NAN_METHOD(BSONSerialize); - static NAN_METHOD(BSONDeserialize); - - // Calculate size of function - static NAN_METHOD(CalculateObjectSize); - static NAN_METHOD(SerializeWithBufferAndIndex); - - // Constructor used for creating new BSON objects from C++ - static Persistent constructor_template; - -private: - static NAN_METHOD(New); - static Handle deserialize(BSON *bson, char *data, uint32_t dataLength, uint32_t startIndex, bool is_array_item); - - // BSON type instantiate functions - Persistent longConstructor; - Persistent objectIDConstructor; - Persistent binaryConstructor; - Persistent codeConstructor; - Persistent dbrefConstructor; - Persistent symbolConstructor; - Persistent doubleConstructor; - Persistent timestampConstructor; - Persistent minKeyConstructor; - Persistent maxKeyConstructor; - - // Equality Objects - Persistent longString; - Persistent objectIDString; - Persistent binaryString; - Persistent codeString; - Persistent dbrefString; - Persistent symbolString; - Persistent doubleString; - Persistent timestampString; - Persistent minKeyString; - Persistent maxKeyString; - - // Equality speed up comparison objects - Persistent _bsontypeString; - Persistent _longLowString; - Persistent _longHighString; - Persistent _objectIDidString; - Persistent _binaryPositionString; - Persistent _binarySubTypeString; - Persistent _binaryBufferString; - Persistent _doubleValueString; - Persistent _symbolValueString; - - Persistent _dbRefRefString; - Persistent _dbRefIdRefString; - Persistent _dbRefDbRefString; - Persistent _dbRefNamespaceString; - Persistent _dbRefDbString; - Persistent _dbRefOidString; - - Persistent _codeCodeString; - Persistent _codeScopeString; - Persistent _toBSONString; - - Local GetSerializeObject(const Handle& object); - - template friend class BSONSerializer; - friend class BSONDeserializer; -}; - -//=========================================================================== - -class CountStream -{ -public: - CountStream() : count(0) { } - - void WriteByte(int value) { ++count; } - void WriteByte(const Handle&, const Handle&) { ++count; } - void WriteBool(const Handle& value) { ++count; } - void WriteInt32(int32_t value) { count += 4; } - void WriteInt32(const Handle& value) { count += 4; } - void WriteInt32(const Handle& object, const Handle& key) { count += 4; } - void WriteInt64(int64_t value) { count += 8; } - void WriteInt64(const Handle& value) { count += 8; } - void WriteDouble(double value) { count += 8; } - void WriteDouble(const Handle& value) { count += 8; } - void WriteDouble(const Handle&, const Handle&) { count += 8; } - void WriteUInt32String(uint32_t name) { char buffer[32]; count += sprintf(buffer, "%u", name) + 1; } - void WriteLengthPrefixedString(const Local& value) { count += value->Utf8Length()+5; } - void WriteObjectId(const Handle& object, const Handle& key) { count += 12; } - void WriteString(const Local& value) { count += value->Utf8Length() + 1; } // This returns the number of bytes 
exclusive of the NULL terminator - void WriteData(const char* data, size_t length) { count += length; } - - void* BeginWriteType() { ++count; return NULL; } - void CommitType(void*, BsonType) { } - void* BeginWriteSize() { count += 4; return NULL; } - void CommitSize(void*) { } - - size_t GetSerializeSize() const { return count; } - - // Do nothing. CheckKey is implemented for DataStream - void CheckKey(const Local&) { } - -private: - size_t count; -}; - -class DataStream -{ -public: - DataStream(char* aDestinationBuffer) : destinationBuffer(aDestinationBuffer), p(aDestinationBuffer) { } - - void WriteByte(int value) { *p++ = value; } - void WriteByte(const Handle& object, const Handle& key) { *p++ = object->Get(key)->Int32Value(); } -#if USE_MISALIGNED_MEMORY_ACCESS - void WriteInt32(int32_t value) { *reinterpret_cast(p) = value; p += 4; } - void WriteInt64(int64_t value) { *reinterpret_cast(p) = value; p += 8; } - void WriteDouble(double value) { *reinterpret_cast(p) = value; p += 8; } -#else - void WriteInt32(int32_t value) { memcpy(p, &value, 4); p += 4; } - void WriteInt64(int64_t value) { memcpy(p, &value, 8); p += 8; } - void WriteDouble(double value) { memcpy(p, &value, 8); p += 8; } -#endif - void WriteBool(const Handle& value) { WriteByte(value->BooleanValue() ? 1 : 0); } - void WriteInt32(const Handle& value) { WriteInt32(value->Int32Value()); } - void WriteInt32(const Handle& object, const Handle& key) { WriteInt32(object->Get(key)); } - void WriteInt64(const Handle& value) { WriteInt64(value->IntegerValue()); } - void WriteDouble(const Handle& value) { WriteDouble(value->NumberValue()); } - void WriteDouble(const Handle& object, const Handle& key) { WriteDouble(object->Get(key)); } - void WriteUInt32String(uint32_t name) { p += sprintf(p, "%u", name) + 1; } - void WriteLengthPrefixedString(const Local& value) { WriteInt32(value->Utf8Length()+1); WriteString(value); } - void WriteObjectId(const Handle& object, const Handle& key); - void WriteString(const Local& value) { p += value->WriteUtf8(p); } // This returns the number of bytes inclusive of the NULL terminator. 
- void WriteData(const char* data, size_t length) { memcpy(p, data, length); p += length; } - - void* BeginWriteType() { void* returnValue = p; p++; return returnValue; } - void CommitType(void* beginPoint, BsonType value) { *reinterpret_cast(beginPoint) = value; } - void* BeginWriteSize() { void* returnValue = p; p += 4; return returnValue; } - -#if USE_MISALIGNED_MEMORY_ACCESS - void CommitSize(void* beginPoint) { *reinterpret_cast(beginPoint) = (int32_t) (p - (char*) beginPoint); } -#else - void CommitSize(void* beginPoint) { int32_t value = (int32_t) (p - (char*) beginPoint); memcpy(beginPoint, &value, 4); } -#endif - - size_t GetSerializeSize() const { return p - destinationBuffer; } - - void CheckKey(const Local& keyName); - -protected: - char *const destinationBuffer; // base, never changes - char* p; // cursor into buffer -}; - -template class BSONSerializer : public T -{ -private: - typedef T Inherited; - -public: - BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions) : Inherited(), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { } - BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions, char* parentParam) : Inherited(parentParam), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { } - - void SerializeDocument(const Handle& value); - void SerializeArray(const Handle& value); - void SerializeValue(void* typeLocation, const Handle& value); - -private: - bool checkKeys; - bool serializeFunctions; - BSON* bson; -}; - -//=========================================================================== - -class BSONDeserializer -{ -public: - BSONDeserializer(BSON* aBson, char* data, size_t length); - BSONDeserializer(BSONDeserializer& parentSerializer, size_t length); - - Handle DeserializeDocument(bool promoteLongs); - - bool HasMoreData() const { return p < pEnd; } - Handle ReadCString(); - uint32_t ReadIntegerString(); - int32_t ReadRegexOptions(); - Local ReadString(); - Local ReadObjectId(); - - unsigned char ReadByte() { return *reinterpret_cast(p++); } -#if USE_MISALIGNED_MEMORY_ACCESS - int32_t ReadInt32() { int32_t returnValue = *reinterpret_cast(p); p += 4; return returnValue; } - uint32_t ReadUInt32() { uint32_t returnValue = *reinterpret_cast(p); p += 4; return returnValue; } - int64_t ReadInt64() { int64_t returnValue = *reinterpret_cast(p); p += 8; return returnValue; } - double ReadDouble() { double returnValue = *reinterpret_cast(p); p += 8; return returnValue; } -#else - int32_t ReadInt32() { int32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; } - uint32_t ReadUInt32() { uint32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; } - int64_t ReadInt64() { int64_t returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; } - double ReadDouble() { double returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; } -#endif - - size_t GetSerializeSize() const { return p - pStart; } - -private: - Handle DeserializeArray(bool promoteLongs); - Handle DeserializeValue(BsonType type, bool promoteLongs); - Handle DeserializeDocumentInternal(bool promoteLongs); - Handle DeserializeArrayInternal(bool promoteLongs); - - BSON* bson; - char* const pStart; - char* p; - char* const pEnd; -}; - -//=========================================================================== - -#endif // BSON_H_ - -//=========================================================================== 
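One detail worth noting from the header just shown: the stream classes pick their write strategy at compile time via USE_MISALIGNED_MEMORY_ACCESS, doing a direct reinterpret_cast store where unaligned access is tolerated and falling back to memcpy on platforms where it is not (the header defines the macro to 0 under __arm__). A minimal, hedged sketch of that idea, not taken from the patch (EXAMPLE_MISALIGNED_OK and WriteInt32Example are invented names for illustration):

    #include <cstdint>
    #include <cstring>

    #ifdef __arm__
    #define EXAMPLE_MISALIGNED_OK 0   // mirrors the header's __arm__ special case
    #else
    #define EXAMPLE_MISALIGNED_OK 1
    #endif

    // Store a 32-bit value at p and return the advanced cursor.
    inline char* WriteInt32Example(char* p, int32_t value) {
    #if EXAMPLE_MISALIGNED_OK
        *reinterpret_cast<int32_t*>(p) = value;  // single (possibly unaligned) store
    #else
        std::memcpy(p, &value, sizeof value);    // byte-wise copy is always safe
    #endif
        return p + sizeof(value);
    }

The same switch covers the 64-bit and double writers in DataStream and the corresponding reads in BSONDeserializer.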
+//===========================================================================
+
+#ifndef BSON_H_
+#define BSON_H_
+
+//===========================================================================
+
+#ifdef __arm__
+#define USE_MISALIGNED_MEMORY_ACCESS 0
+#else
+#define USE_MISALIGNED_MEMORY_ACCESS 1
+#endif
+
+#include <v8.h>
+#include <node.h>
+#include <node_object_wrap.h>
+#include "nan.h"
+
+using namespace v8;
+using namespace node;
+
+//===========================================================================
+
+enum BsonType
+{
+  BSON_TYPE_NUMBER = 1,
+  BSON_TYPE_STRING = 2,
+  BSON_TYPE_OBJECT = 3,
+  BSON_TYPE_ARRAY = 4,
+  BSON_TYPE_BINARY = 5,
+  BSON_TYPE_UNDEFINED = 6,
+  BSON_TYPE_OID = 7,
+  BSON_TYPE_BOOLEAN = 8,
+  BSON_TYPE_DATE = 9,
+  BSON_TYPE_NULL = 10,
+  BSON_TYPE_REGEXP = 11,
+  BSON_TYPE_CODE = 13,
+  BSON_TYPE_SYMBOL = 14,
+  BSON_TYPE_CODE_W_SCOPE = 15,
+  BSON_TYPE_INT = 16,
+  BSON_TYPE_TIMESTAMP = 17,
+  BSON_TYPE_LONG = 18,
+  BSON_TYPE_MAX_KEY = 0x7f,
+  BSON_TYPE_MIN_KEY = 0xff
+};
+
+//===========================================================================
+
+template<typename T> class BSONSerializer;
+
+class BSON : public ObjectWrap {
+public:
+  BSON();
+  ~BSON() {}
+
+  static void Initialize(Handle<Object> target);
+  static NAN_METHOD(BSONDeserializeStream);
+
+  // JS based objects
+  static NAN_METHOD(BSONSerialize);
+  static NAN_METHOD(BSONDeserialize);
+
+  // Calculate size of function
+  static NAN_METHOD(CalculateObjectSize);
+  static NAN_METHOD(SerializeWithBufferAndIndex);
+
+  // Constructor used for creating new BSON objects from C++
+  static Persistent<FunctionTemplate> constructor_template;
+
+private:
+  static NAN_METHOD(New);
+  static Handle<Value> deserialize(BSON *bson, char *data, uint32_t dataLength, uint32_t startIndex, bool is_array_item);
+
+  // BSON type instantiate functions
+  Persistent<Function> longConstructor;
+  Persistent<Function> objectIDConstructor;
+  Persistent<Function> binaryConstructor;
+  Persistent<Function> codeConstructor;
+  Persistent<Function> dbrefConstructor;
+  Persistent<Function> symbolConstructor;
+  Persistent<Function> doubleConstructor;
+  Persistent<Function> timestampConstructor;
+  Persistent<Function> minKeyConstructor;
+  Persistent<Function> maxKeyConstructor;
+
+  // Equality Objects
+  Persistent<String> longString;
+  Persistent<String> objectIDString;
+  Persistent<String> binaryString;
+  Persistent<String> codeString;
+  Persistent<String> dbrefString;
+  Persistent<String> symbolString;
+  Persistent<String> doubleString;
+  Persistent<String> timestampString;
+  Persistent<String> minKeyString;
+  Persistent<String> maxKeyString;
+
+  // Equality speed up comparison objects
+  Persistent<String> _bsontypeString;
+  Persistent<String> _longLowString;
+  Persistent<String> _longHighString;
+  Persistent<String> _objectIDidString;
+  Persistent<String> _binaryPositionString;
+  Persistent<String> _binarySubTypeString;
+  Persistent<String> _binaryBufferString;
+  Persistent<String> _doubleValueString;
+  Persistent<String> _symbolValueString;
+
+  Persistent<String> _dbRefRefString;
+  Persistent<String> _dbRefIdRefString;
+  Persistent<String> _dbRefDbRefString;
+  Persistent<String> _dbRefNamespaceString;
+  Persistent<String> _dbRefDbString;
+  Persistent<String> _dbRefOidString;
+
+  Persistent<String> _codeCodeString;
+  Persistent<String> _codeScopeString;
+  Persistent<String> _toBSONString;
+
+  Local<Object> GetSerializeObject(const Handle<Value>& object);
+
+  template<typename T> friend class BSONSerializer;
+  friend class BSONDeserializer;
+};
+
+//===========================================================================
+
+class CountStream
+{
+public:
+  CountStream() : count(0) { }
+
+  void WriteByte(int value) { ++count; }
+  void WriteByte(const Handle<Object>&, const Handle<String>&) { ++count; }
+  void WriteBool(const Handle<Value>& value) { ++count; }
+  void WriteInt32(int32_t value) { count += 4; }
+  void WriteInt32(const Handle<Value>& value) { count += 4; }
+  void WriteInt32(const Handle<Object>& object, const Handle<String>& key) { count += 4; }
+  void WriteInt64(int64_t value) { count += 8; }
+  void WriteInt64(const Handle<Value>& value) { count += 8; }
+  void WriteDouble(double value) { count += 8; }
+  void WriteDouble(const Handle<Value>& value) { count += 8; }
+  void WriteDouble(const Handle<Object>&, const Handle<String>&) { count += 8; }
+  void WriteUInt32String(uint32_t name) { char buffer[32]; count += sprintf(buffer, "%u", name) + 1; }
+  void WriteLengthPrefixedString(const Local<String>& value) { count += value->Utf8Length()+5; }
+  void WriteObjectId(const Handle<Object>& object, const Handle<String>& key) { count += 12; }
+  void WriteString(const Local<String>& value) { count += value->Utf8Length() + 1; } // This returns the number of bytes exclusive of the NULL terminator
+  void WriteData(const char* data, size_t length) { count += length; }
+
+  void* BeginWriteType() { ++count; return NULL; }
+  void CommitType(void*, BsonType) { }
+  void* BeginWriteSize() { count += 4; return NULL; }
+  void CommitSize(void*) { }
+
+  size_t GetSerializeSize() const { return count; }
+
+  // Do nothing. CheckKey is implemented for DataStream
+  void CheckKey(const Local<String>&) { }
+
+private:
+  size_t count;
+};
+
+class DataStream
+{
+public:
+  DataStream(char* aDestinationBuffer) : destinationBuffer(aDestinationBuffer), p(aDestinationBuffer) { }
+
+  void WriteByte(int value) { *p++ = value; }
+  void WriteByte(const Handle<Object>& object, const Handle<String>& key) { *p++ = object->Get(key)->Int32Value(); }
+#if USE_MISALIGNED_MEMORY_ACCESS
+  void WriteInt32(int32_t value) { *reinterpret_cast<int32_t*>(p) = value; p += 4; }
+  void WriteInt64(int64_t value) { *reinterpret_cast<int64_t*>(p) = value; p += 8; }
+  void WriteDouble(double value) { *reinterpret_cast<double*>(p) = value; p += 8; }
+#else
+  void WriteInt32(int32_t value) { memcpy(p, &value, 4); p += 4; }
+  void WriteInt64(int64_t value) { memcpy(p, &value, 8); p += 8; }
+  void WriteDouble(double value) { memcpy(p, &value, 8); p += 8; }
+#endif
+  void WriteBool(const Handle<Value>& value) { WriteByte(value->BooleanValue() ? 1 : 0); }
+  void WriteInt32(const Handle<Value>& value) { WriteInt32(value->Int32Value()); }
+  void WriteInt32(const Handle<Object>& object, const Handle<String>& key) { WriteInt32(object->Get(key)); }
+  void WriteInt64(const Handle<Value>& value) { WriteInt64(value->IntegerValue()); }
+  void WriteDouble(const Handle<Value>& value) { WriteDouble(value->NumberValue()); }
+  void WriteDouble(const Handle<Object>& object, const Handle<String>& key) { WriteDouble(object->Get(key)); }
+  void WriteUInt32String(uint32_t name) { p += sprintf(p, "%u", name) + 1; }
+  void WriteLengthPrefixedString(const Local<String>& value) { WriteInt32(value->Utf8Length()+1); WriteString(value); }
+  void WriteObjectId(const Handle<Object>& object, const Handle<String>& key);
+  void WriteString(const Local<String>& value) { p += value->WriteUtf8(p); } // This returns the number of bytes inclusive of the NULL terminator.
+  void WriteData(const char* data, size_t length) { memcpy(p, data, length); p += length; }
+
+  void* BeginWriteType() { void* returnValue = p; p++; return returnValue; }
+  void CommitType(void* beginPoint, BsonType value) { *reinterpret_cast<unsigned char*>(beginPoint) = value; }
+  void* BeginWriteSize() { void* returnValue = p; p += 4; return returnValue; }
+
+#if USE_MISALIGNED_MEMORY_ACCESS
+  void CommitSize(void* beginPoint) { *reinterpret_cast<int32_t*>(beginPoint) = (int32_t) (p - (char*) beginPoint); }
+#else
+  void CommitSize(void* beginPoint) { int32_t value = (int32_t) (p - (char*) beginPoint); memcpy(beginPoint, &value, 4); }
+#endif
+
+  size_t GetSerializeSize() const { return p - destinationBuffer; }
+
+  void CheckKey(const Local<String>& keyName);
+
+protected:
+  char *const destinationBuffer; // base, never changes
+  char* p;                       // cursor into buffer
+};
+
+template<typename T> class BSONSerializer : public T
+{
+private:
+  typedef T Inherited;
+
+public:
+  BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions) : Inherited(), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { }
+  BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions, char* parentParam) : Inherited(parentParam), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { }
+
+  void SerializeDocument(const Handle<Value>& value);
+  void SerializeArray(const Handle<Value>& value);
+  void SerializeValue(void* typeLocation, const Handle<Value>& value);
+
+private:
+  bool checkKeys;
+  bool serializeFunctions;
+  BSON* bson;
+};
+
+//===========================================================================
+
+class BSONDeserializer
+{
+public:
+  BSONDeserializer(BSON* aBson, char* data, size_t length);
+  BSONDeserializer(BSONDeserializer& parentSerializer, size_t length);
+
+  Handle<Value> DeserializeDocument(bool promoteLongs);
+
+  bool HasMoreData() const { return p < pEnd; }
+  Handle<Value> ReadCString();
+  uint32_t ReadIntegerString();
+  int32_t ReadRegexOptions();
+  Local<String> ReadString();
+  Local<String> ReadObjectId();
+
+  unsigned char ReadByte() { return *reinterpret_cast<unsigned char*>(p++); }
+#if USE_MISALIGNED_MEMORY_ACCESS
+  int32_t ReadInt32() { int32_t returnValue = *reinterpret_cast<int32_t*>(p); p += 4; return returnValue; }
+  uint32_t ReadUInt32() { uint32_t returnValue = *reinterpret_cast<uint32_t*>(p); p += 4; return returnValue; }
+  int64_t ReadInt64() { int64_t returnValue = *reinterpret_cast<int64_t*>(p); p += 8; return returnValue; }
+  double ReadDouble() { double returnValue = *reinterpret_cast<double*>(p); p += 8; return returnValue; }
+#else
+  int32_t ReadInt32() { int32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; }
+  uint32_t ReadUInt32() { uint32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; }
+  int64_t ReadInt64() { int64_t returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; }
+  double ReadDouble() { double returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; }
+#endif
+
+  size_t GetSerializeSize() const { return p - pStart; }
+
+private:
+  Handle<Value> DeserializeArray(bool promoteLongs);
+  Handle<Value> DeserializeValue(BsonType type, bool promoteLongs);
+  Handle<Value> DeserializeDocumentInternal(bool promoteLongs);
+  Handle<Value> DeserializeArrayInternal(bool promoteLongs);
+
+  BSON* bson;
+  char* const pStart;
+  char* p;
+  char* const pEnd;
+};
+
+//===========================================================================
+
+#endif // BSON_H_
+
+//===========================================================================
diff --git a/node_modules/mongodb/node_modules/bson/ext/index.js
b/node_modules/bson/ext/index.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/ext/index.js rename to node_modules/bson/ext/index.js diff --git a/node_modules/mongodb/node_modules/bson/ext/nan.h b/node_modules/bson/ext/nan.h similarity index 100% rename from node_modules/mongodb/node_modules/bson/ext/nan.h rename to node_modules/bson/ext/nan.h diff --git a/node_modules/mongodb/node_modules/bson/ext/win32/ia32/bson.node b/node_modules/bson/ext/win32/ia32/bson.node similarity index 100% rename from node_modules/mongodb/node_modules/bson/ext/win32/ia32/bson.node rename to node_modules/bson/ext/win32/ia32/bson.node diff --git a/node_modules/mongodb/node_modules/bson/ext/win32/x64/bson.node b/node_modules/bson/ext/win32/x64/bson.node similarity index 100% rename from node_modules/mongodb/node_modules/bson/ext/win32/x64/bson.node rename to node_modules/bson/ext/win32/x64/bson.node diff --git a/node_modules/mongodb/node_modules/bson/ext/wscript b/node_modules/bson/ext/wscript similarity index 100% rename from node_modules/mongodb/node_modules/bson/ext/wscript rename to node_modules/bson/ext/wscript diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/binary.js b/node_modules/bson/lib/bson/binary.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/binary.js rename to node_modules/bson/lib/bson/binary.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/binary_parser.js b/node_modules/bson/lib/bson/binary_parser.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/binary_parser.js rename to node_modules/bson/lib/bson/binary_parser.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/bson.js b/node_modules/bson/lib/bson/bson.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/bson.js rename to node_modules/bson/lib/bson/bson.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/code.js b/node_modules/bson/lib/bson/code.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/code.js rename to node_modules/bson/lib/bson/code.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/db_ref.js b/node_modules/bson/lib/bson/db_ref.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/db_ref.js rename to node_modules/bson/lib/bson/db_ref.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/double.js b/node_modules/bson/lib/bson/double.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/double.js rename to node_modules/bson/lib/bson/double.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/float_parser.js b/node_modules/bson/lib/bson/float_parser.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/float_parser.js rename to node_modules/bson/lib/bson/float_parser.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/index.js b/node_modules/bson/lib/bson/index.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/index.js rename to node_modules/bson/lib/bson/index.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/long.js b/node_modules/bson/lib/bson/long.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/long.js rename to node_modules/bson/lib/bson/long.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/max_key.js b/node_modules/bson/lib/bson/max_key.js similarity index 100% rename from 
node_modules/mongodb/node_modules/bson/lib/bson/max_key.js rename to node_modules/bson/lib/bson/max_key.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/min_key.js b/node_modules/bson/lib/bson/min_key.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/min_key.js rename to node_modules/bson/lib/bson/min_key.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/objectid.js b/node_modules/bson/lib/bson/objectid.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/objectid.js rename to node_modules/bson/lib/bson/objectid.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/symbol.js b/node_modules/bson/lib/bson/symbol.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/symbol.js rename to node_modules/bson/lib/bson/symbol.js diff --git a/node_modules/mongodb/node_modules/bson/lib/bson/timestamp.js b/node_modules/bson/lib/bson/timestamp.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/lib/bson/timestamp.js rename to node_modules/bson/lib/bson/timestamp.js diff --git a/node_modules/bson/package.json b/node_modules/bson/package.json new file mode 100644 index 000000000..8b0423d34 --- /dev/null +++ b/node_modules/bson/package.json @@ -0,0 +1,74 @@ +{ + "_from": "bson@0.2.5", + "_id": "bson@0.2.5", + "_inBundle": false, + "_integrity": "sha1-UA0m2IPdyOAvLIgBFidjYRHBBcU=", + "_location": "/bson", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "bson@0.2.5", + "name": "bson", + "escapedName": "bson", + "rawSpec": "0.2.5", + "saveSpec": null, + "fetchSpec": "0.2.5" + }, + "_requiredBy": [ + "/mongodb" + ], + "_resolved": "https://registry.npmjs.org/bson/-/bson-0.2.5.tgz", + "_shasum": "500d26d883ddc8e02f2c88011627636111c105c5", + "_spec": "bson@0.2.5", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongodb", + "author": { + "name": "Christian Amor Kvalheim", + "email": "christkv@gmail.com" + }, + "browser": "lib/bson/bson.js", + "bugs": { + "url": "https://github.com/mongodb/js-bson/issues" + }, + "bundleDependencies": false, + "config": { + "native": false + }, + "contributors": [], + "deprecated": "Fixed a critical issue with BSON serialization documented in CVE-2019-2391, see https://bit.ly/2KcpXdo for more details", + "description": "A bson parser for node.js and the browser", + "devDependencies": { + "gleak": "0.2.3", + "nodeunit": "0.8.2", + "one": "2.X.X" + }, + "directories": { + "lib": "./lib/bson" + }, + "engines": { + "node": ">=0.6.19" + }, + "homepage": "https://github.com/mongodb/js-bson#readme", + "keywords": [ + "mongodb", + "bson", + "parser" + ], + "licenses": [ + { + "type": "Apache License, Version 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "./lib/bson/index", + "name": "bson", + "repository": { + "type": "git", + "url": "git://github.com/mongodb/js-bson.git" + }, + "scripts": { + "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", + "test": "nodeunit ./test/node && TEST_NATIVE=TRUE nodeunit ./test/node" + }, + "version": "0.2.5" +} diff --git a/node_modules/mongodb/node_modules/bson/tools/gleak.js b/node_modules/bson/tools/gleak.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/tools/gleak.js rename to node_modules/bson/tools/gleak.js diff --git a/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE b/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE 
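The vendored bson package.json above keeps the native build optional: its install script, "(node-gyp rebuild 2> builderror.log) || (exit 0)", lets npm install succeed even when compilation fails, because the module can fall back to its pure-JavaScript parser. A minimal sketch of that load-native-or-fall-back pattern (an assumption about the general shape, not a copy of the package's actual lib/bson/index.js; the require paths are illustrative):

// Prefer the compiled addon when the node-gyp build produced one,
// otherwise fall back to the pure-JS implementation shipped with the package.
var bson;
try {
  bson = require('./build/Release/bson'); // present only if the optional rebuild succeeded (path assumed)
} catch (err) {
  bson = require('./lib/bson/bson');      // pure-JS fallback (path assumed)
}
module.exports = bson;

Either branch exposes the same BSON API, so calling code does not need to know which implementation was loaded.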
similarity index 100% rename from node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE rename to node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE diff --git a/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js b/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js rename to node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js diff --git a/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.css b/node_modules/bson/tools/jasmine-1.1.0/jasmine.css similarity index 100% rename from node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.css rename to node_modules/bson/tools/jasmine-1.1.0/jasmine.css diff --git a/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.js b/node_modules/bson/tools/jasmine-1.1.0/jasmine.js similarity index 100% rename from node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.js rename to node_modules/bson/tools/jasmine-1.1.0/jasmine.js diff --git a/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png b/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png similarity index 100% rename from node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png rename to node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png diff --git a/node_modules/express/node_modules/buffer-crc32/.npmignore b/node_modules/buffer-crc32/.npmignore similarity index 100% rename from node_modules/express/node_modules/buffer-crc32/.npmignore rename to node_modules/buffer-crc32/.npmignore diff --git a/node_modules/express/node_modules/buffer-crc32/.travis.yml b/node_modules/buffer-crc32/.travis.yml similarity index 100% rename from node_modules/express/node_modules/buffer-crc32/.travis.yml rename to node_modules/buffer-crc32/.travis.yml diff --git a/node_modules/express/node_modules/buffer-crc32/README.md b/node_modules/buffer-crc32/README.md similarity index 100% rename from node_modules/express/node_modules/buffer-crc32/README.md rename to node_modules/buffer-crc32/README.md diff --git a/node_modules/express/node_modules/buffer-crc32/index.js b/node_modules/buffer-crc32/index.js similarity index 100% rename from node_modules/express/node_modules/buffer-crc32/index.js rename to node_modules/buffer-crc32/index.js diff --git a/node_modules/buffer-crc32/package.json b/node_modules/buffer-crc32/package.json new file mode 100644 index 000000000..73a86dcf0 --- /dev/null +++ b/node_modules/buffer-crc32/package.json @@ -0,0 +1,61 @@ +{ + "_from": "buffer-crc32@0.2.1", + "_id": "buffer-crc32@0.2.1", + "_inBundle": false, + "_integrity": "sha1-vj5TgvwCttYySVasGvmKqYsIU0w=", + "_location": "/buffer-crc32", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "buffer-crc32@0.2.1", + "name": "buffer-crc32", + "escapedName": "buffer-crc32", + "rawSpec": "0.2.1", + "saveSpec": null, + "fetchSpec": "0.2.1" + }, + "_requiredBy": [ + "/connect", + "/express" + ], + "_resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", + "_shasum": "be3e5382fc02b6d6324956ac1af98aa98b08534c", + "_spec": "buffer-crc32@0.2.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "Brian J. 
Brennan", + "email": "brianloveswords@gmail.com", + "url": "http://bjb.io" + }, + "bugs": { + "url": "https://github.com/brianloveswords/buffer-crc32/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Vladimir Kuznetsov" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "A pure javascript CRC32 algorithm that plays nice with binary data", + "devDependencies": { + "tap": "~0.2.5" + }, + "engines": { + "node": "*" + }, + "homepage": "https://github.com/brianloveswords/buffer-crc32", + "main": "index.js", + "name": "buffer-crc32", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/buffer-crc32.git" + }, + "scripts": { + "test": "tap tests/*.test.js" + }, + "version": "0.2.1" +} diff --git a/node_modules/express/node_modules/buffer-crc32/tests/crc.test.js b/node_modules/buffer-crc32/tests/crc.test.js similarity index 100% rename from node_modules/express/node_modules/buffer-crc32/tests/crc.test.js rename to node_modules/buffer-crc32/tests/crc.test.js diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/.npmignore b/node_modules/bytes/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/bytes/.npmignore rename to node_modules/bytes/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/History.md b/node_modules/bytes/History.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/bytes/History.md rename to node_modules/bytes/History.md diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/Makefile b/node_modules/bytes/Makefile similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/bytes/Makefile rename to node_modules/bytes/Makefile diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/Readme.md b/node_modules/bytes/Readme.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/bytes/Readme.md rename to node_modules/bytes/Readme.md diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/component.json b/node_modules/bytes/component.json similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/bytes/component.json rename to node_modules/bytes/component.json diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/index.js b/node_modules/bytes/index.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/bytes/index.js rename to node_modules/bytes/index.js diff --git a/node_modules/bytes/package.json b/node_modules/bytes/package.json new file mode 100644 index 000000000..cc6a245ba --- /dev/null +++ b/node_modules/bytes/package.json @@ -0,0 +1,47 @@ +{ + "_from": "bytes@0.2.1", + "_id": "bytes@0.2.1", + "_inBundle": false, + "_integrity": "sha1-VVsIq8sGP4l1kFMCUj5M1P/f3zE=", + "_location": "/bytes", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "bytes@0.2.1", + "name": "bytes", + "escapedName": "bytes", + "rawSpec": "0.2.1", + "saveSpec": null, + "fetchSpec": "0.2.1" + }, + "_requiredBy": [ + "/connect", + "/raw-body" + ], + "_resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz", + "_shasum": "555b08abcb063f8975905302523e4cd4ffdfdf31", + "_spec": "bytes@0.2.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "author": { + "name": "TJ Holowaychuk", + 
"email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bundleDependencies": false, + "component": { + "scripts": { + "bytes/index.js": "index.js" + } + }, + "dependencies": {}, + "deprecated": false, + "description": "byte size string parser / serializer", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "main": "index.js", + "name": "bytes", + "version": "0.2.1" +} diff --git a/node_modules/express/node_modules/commander/History.md b/node_modules/commander/History.md similarity index 100% rename from node_modules/express/node_modules/commander/History.md rename to node_modules/commander/History.md diff --git a/node_modules/express/node_modules/commander/Readme.md b/node_modules/commander/Readme.md similarity index 100% rename from node_modules/express/node_modules/commander/Readme.md rename to node_modules/commander/Readme.md diff --git a/node_modules/express/node_modules/commander/index.js b/node_modules/commander/index.js similarity index 100% rename from node_modules/express/node_modules/commander/index.js rename to node_modules/commander/index.js diff --git a/node_modules/commander/package.json b/node_modules/commander/package.json new file mode 100644 index 000000000..efe759438 --- /dev/null +++ b/node_modules/commander/package.json @@ -0,0 +1,62 @@ +{ + "_from": "commander@1.3.2", + "_id": "commander@1.3.2", + "_inBundle": false, + "_integrity": "sha1-io8w7GcKb91kr1LxkUuQfXnq1bU=", + "_location": "/commander", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "commander@1.3.2", + "name": "commander", + "escapedName": "commander", + "rawSpec": "1.3.2", + "saveSpec": null, + "fetchSpec": "1.3.2" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", + "_shasum": "8a8f30ec670a6fdd64af52f1914b907d79ead5b5", + "_spec": "commander@1.3.2", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "bugs": { + "url": "https://github.com/visionmedia/commander.js/issues" + }, + "bundleDependencies": false, + "dependencies": { + "keypress": "0.1.x" + }, + "deprecated": false, + "description": "the complete solution for node.js command-line programs", + "devDependencies": { + "should": ">= 0.0.1" + }, + "engines": { + "node": ">= 0.6.x" + }, + "homepage": "https://github.com/visionmedia/commander.js#readme", + "keywords": [ + "command", + "option", + "parser", + "prompt", + "stdin" + ], + "main": "index", + "name": "commander", + "repository": { + "type": "git", + "url": "git+https://github.com/visionmedia/commander.js.git" + }, + "scripts": { + "test": "make test" + }, + "version": "1.3.2" +} diff --git a/node_modules/express/node_modules/connect/.npmignore b/node_modules/connect/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/.npmignore rename to node_modules/connect/.npmignore diff --git a/node_modules/express/node_modules/connect/.travis.yml b/node_modules/connect/.travis.yml similarity index 100% rename from node_modules/express/node_modules/connect/.travis.yml rename to node_modules/connect/.travis.yml diff --git a/node_modules/express/node_modules/connect/LICENSE b/node_modules/connect/LICENSE similarity index 100% rename from node_modules/express/node_modules/connect/LICENSE rename to node_modules/connect/LICENSE diff --git a/node_modules/express/node_modules/connect/Readme.md 
b/node_modules/connect/Readme.md similarity index 100% rename from node_modules/express/node_modules/connect/Readme.md rename to node_modules/connect/Readme.md diff --git a/node_modules/express/node_modules/connect/index.js b/node_modules/connect/index.js similarity index 100% rename from node_modules/express/node_modules/connect/index.js rename to node_modules/connect/index.js diff --git a/node_modules/express/node_modules/connect/lib/cache.js b/node_modules/connect/lib/cache.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/cache.js rename to node_modules/connect/lib/cache.js diff --git a/node_modules/express/node_modules/connect/lib/connect.js b/node_modules/connect/lib/connect.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/connect.js rename to node_modules/connect/lib/connect.js diff --git a/node_modules/express/node_modules/connect/lib/index.js b/node_modules/connect/lib/index.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/index.js rename to node_modules/connect/lib/index.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/basicAuth.js b/node_modules/connect/lib/middleware/basicAuth.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/basicAuth.js rename to node_modules/connect/lib/middleware/basicAuth.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/bodyParser.js b/node_modules/connect/lib/middleware/bodyParser.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/bodyParser.js rename to node_modules/connect/lib/middleware/bodyParser.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/compress.js b/node_modules/connect/lib/middleware/compress.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/compress.js rename to node_modules/connect/lib/middleware/compress.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/cookieParser.js b/node_modules/connect/lib/middleware/cookieParser.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/cookieParser.js rename to node_modules/connect/lib/middleware/cookieParser.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/cookieSession.js b/node_modules/connect/lib/middleware/cookieSession.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/cookieSession.js rename to node_modules/connect/lib/middleware/cookieSession.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/csrf.js b/node_modules/connect/lib/middleware/csrf.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/csrf.js rename to node_modules/connect/lib/middleware/csrf.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/directory.js b/node_modules/connect/lib/middleware/directory.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/directory.js rename to node_modules/connect/lib/middleware/directory.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/errorHandler.js b/node_modules/connect/lib/middleware/errorHandler.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/errorHandler.js rename to node_modules/connect/lib/middleware/errorHandler.js diff --git 
a/node_modules/express/node_modules/connect/lib/middleware/favicon.js b/node_modules/connect/lib/middleware/favicon.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/favicon.js rename to node_modules/connect/lib/middleware/favicon.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/json.js b/node_modules/connect/lib/middleware/json.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/json.js rename to node_modules/connect/lib/middleware/json.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/limit.js b/node_modules/connect/lib/middleware/limit.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/limit.js rename to node_modules/connect/lib/middleware/limit.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/logger.js b/node_modules/connect/lib/middleware/logger.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/logger.js rename to node_modules/connect/lib/middleware/logger.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/methodOverride.js b/node_modules/connect/lib/middleware/methodOverride.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/methodOverride.js rename to node_modules/connect/lib/middleware/methodOverride.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/multipart.js b/node_modules/connect/lib/middleware/multipart.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/multipart.js rename to node_modules/connect/lib/middleware/multipart.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/query.js b/node_modules/connect/lib/middleware/query.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/query.js rename to node_modules/connect/lib/middleware/query.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/responseTime.js b/node_modules/connect/lib/middleware/responseTime.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/responseTime.js rename to node_modules/connect/lib/middleware/responseTime.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/session.js b/node_modules/connect/lib/middleware/session.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/session.js rename to node_modules/connect/lib/middleware/session.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/session/cookie.js b/node_modules/connect/lib/middleware/session/cookie.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/session/cookie.js rename to node_modules/connect/lib/middleware/session/cookie.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/session/memory.js b/node_modules/connect/lib/middleware/session/memory.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/session/memory.js rename to node_modules/connect/lib/middleware/session/memory.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/session/session.js b/node_modules/connect/lib/middleware/session/session.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/session/session.js rename to 
node_modules/connect/lib/middleware/session/session.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/session/store.js b/node_modules/connect/lib/middleware/session/store.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/session/store.js rename to node_modules/connect/lib/middleware/session/store.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/static.js b/node_modules/connect/lib/middleware/static.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/static.js rename to node_modules/connect/lib/middleware/static.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/staticCache.js b/node_modules/connect/lib/middleware/staticCache.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/staticCache.js rename to node_modules/connect/lib/middleware/staticCache.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/timeout.js b/node_modules/connect/lib/middleware/timeout.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/timeout.js rename to node_modules/connect/lib/middleware/timeout.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/urlencoded.js b/node_modules/connect/lib/middleware/urlencoded.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/urlencoded.js rename to node_modules/connect/lib/middleware/urlencoded.js diff --git a/node_modules/express/node_modules/connect/lib/middleware/vhost.js b/node_modules/connect/lib/middleware/vhost.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/middleware/vhost.js rename to node_modules/connect/lib/middleware/vhost.js diff --git a/node_modules/express/node_modules/connect/lib/patch.js b/node_modules/connect/lib/patch.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/patch.js rename to node_modules/connect/lib/patch.js diff --git a/node_modules/express/node_modules/connect/lib/proto.js b/node_modules/connect/lib/proto.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/proto.js rename to node_modules/connect/lib/proto.js diff --git a/node_modules/express/node_modules/connect/lib/public/directory.html b/node_modules/connect/lib/public/directory.html similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/directory.html rename to node_modules/connect/lib/public/directory.html diff --git a/node_modules/express/node_modules/connect/lib/public/error.html b/node_modules/connect/lib/public/error.html similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/error.html rename to node_modules/connect/lib/public/error.html diff --git a/node_modules/express/node_modules/connect/lib/public/favicon.ico b/node_modules/connect/lib/public/favicon.ico similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/favicon.ico rename to node_modules/connect/lib/public/favicon.ico diff --git a/node_modules/express/node_modules/connect/lib/public/icons/folder.png b/node_modules/connect/lib/public/icons/folder.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/folder.png rename to node_modules/connect/lib/public/icons/folder.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page.png 
b/node_modules/connect/lib/public/icons/page.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page.png rename to node_modules/connect/lib/public/icons/page.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_add.png b/node_modules/connect/lib/public/icons/page_add.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_add.png rename to node_modules/connect/lib/public/icons/page_add.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_attach.png b/node_modules/connect/lib/public/icons/page_attach.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_attach.png rename to node_modules/connect/lib/public/icons/page_attach.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_code.png b/node_modules/connect/lib/public/icons/page_code.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_code.png rename to node_modules/connect/lib/public/icons/page_code.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_copy.png b/node_modules/connect/lib/public/icons/page_copy.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_copy.png rename to node_modules/connect/lib/public/icons/page_copy.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_delete.png b/node_modules/connect/lib/public/icons/page_delete.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_delete.png rename to node_modules/connect/lib/public/icons/page_delete.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_edit.png b/node_modules/connect/lib/public/icons/page_edit.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_edit.png rename to node_modules/connect/lib/public/icons/page_edit.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_error.png b/node_modules/connect/lib/public/icons/page_error.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_error.png rename to node_modules/connect/lib/public/icons/page_error.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_excel.png b/node_modules/connect/lib/public/icons/page_excel.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_excel.png rename to node_modules/connect/lib/public/icons/page_excel.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_find.png b/node_modules/connect/lib/public/icons/page_find.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_find.png rename to node_modules/connect/lib/public/icons/page_find.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_gear.png b/node_modules/connect/lib/public/icons/page_gear.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_gear.png rename to node_modules/connect/lib/public/icons/page_gear.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_go.png b/node_modules/connect/lib/public/icons/page_go.png similarity index 100% rename from 
node_modules/express/node_modules/connect/lib/public/icons/page_go.png rename to node_modules/connect/lib/public/icons/page_go.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_green.png b/node_modules/connect/lib/public/icons/page_green.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_green.png rename to node_modules/connect/lib/public/icons/page_green.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_key.png b/node_modules/connect/lib/public/icons/page_key.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_key.png rename to node_modules/connect/lib/public/icons/page_key.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_lightning.png b/node_modules/connect/lib/public/icons/page_lightning.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_lightning.png rename to node_modules/connect/lib/public/icons/page_lightning.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_link.png b/node_modules/connect/lib/public/icons/page_link.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_link.png rename to node_modules/connect/lib/public/icons/page_link.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_paintbrush.png b/node_modules/connect/lib/public/icons/page_paintbrush.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_paintbrush.png rename to node_modules/connect/lib/public/icons/page_paintbrush.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_paste.png b/node_modules/connect/lib/public/icons/page_paste.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_paste.png rename to node_modules/connect/lib/public/icons/page_paste.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_red.png b/node_modules/connect/lib/public/icons/page_red.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_red.png rename to node_modules/connect/lib/public/icons/page_red.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_refresh.png b/node_modules/connect/lib/public/icons/page_refresh.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_refresh.png rename to node_modules/connect/lib/public/icons/page_refresh.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_save.png b/node_modules/connect/lib/public/icons/page_save.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_save.png rename to node_modules/connect/lib/public/icons/page_save.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white.png b/node_modules/connect/lib/public/icons/page_white.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white.png rename to node_modules/connect/lib/public/icons/page_white.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_acrobat.png b/node_modules/connect/lib/public/icons/page_white_acrobat.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_acrobat.png 
rename to node_modules/connect/lib/public/icons/page_white_acrobat.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_actionscript.png b/node_modules/connect/lib/public/icons/page_white_actionscript.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_actionscript.png rename to node_modules/connect/lib/public/icons/page_white_actionscript.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_add.png b/node_modules/connect/lib/public/icons/page_white_add.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_add.png rename to node_modules/connect/lib/public/icons/page_white_add.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_c.png b/node_modules/connect/lib/public/icons/page_white_c.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_c.png rename to node_modules/connect/lib/public/icons/page_white_c.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_camera.png b/node_modules/connect/lib/public/icons/page_white_camera.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_camera.png rename to node_modules/connect/lib/public/icons/page_white_camera.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_cd.png b/node_modules/connect/lib/public/icons/page_white_cd.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_cd.png rename to node_modules/connect/lib/public/icons/page_white_cd.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_code.png b/node_modules/connect/lib/public/icons/page_white_code.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_code.png rename to node_modules/connect/lib/public/icons/page_white_code.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_code_red.png b/node_modules/connect/lib/public/icons/page_white_code_red.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_code_red.png rename to node_modules/connect/lib/public/icons/page_white_code_red.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_coldfusion.png b/node_modules/connect/lib/public/icons/page_white_coldfusion.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_coldfusion.png rename to node_modules/connect/lib/public/icons/page_white_coldfusion.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_compressed.png b/node_modules/connect/lib/public/icons/page_white_compressed.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_compressed.png rename to node_modules/connect/lib/public/icons/page_white_compressed.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_copy.png b/node_modules/connect/lib/public/icons/page_white_copy.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_copy.png rename to node_modules/connect/lib/public/icons/page_white_copy.png diff --git 
a/node_modules/express/node_modules/connect/lib/public/icons/page_white_cplusplus.png b/node_modules/connect/lib/public/icons/page_white_cplusplus.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_cplusplus.png rename to node_modules/connect/lib/public/icons/page_white_cplusplus.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_csharp.png b/node_modules/connect/lib/public/icons/page_white_csharp.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_csharp.png rename to node_modules/connect/lib/public/icons/page_white_csharp.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_cup.png b/node_modules/connect/lib/public/icons/page_white_cup.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_cup.png rename to node_modules/connect/lib/public/icons/page_white_cup.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_database.png b/node_modules/connect/lib/public/icons/page_white_database.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_database.png rename to node_modules/connect/lib/public/icons/page_white_database.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_delete.png b/node_modules/connect/lib/public/icons/page_white_delete.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_delete.png rename to node_modules/connect/lib/public/icons/page_white_delete.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_dvd.png b/node_modules/connect/lib/public/icons/page_white_dvd.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_dvd.png rename to node_modules/connect/lib/public/icons/page_white_dvd.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_edit.png b/node_modules/connect/lib/public/icons/page_white_edit.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_edit.png rename to node_modules/connect/lib/public/icons/page_white_edit.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_error.png b/node_modules/connect/lib/public/icons/page_white_error.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_error.png rename to node_modules/connect/lib/public/icons/page_white_error.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_excel.png b/node_modules/connect/lib/public/icons/page_white_excel.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_excel.png rename to node_modules/connect/lib/public/icons/page_white_excel.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_find.png b/node_modules/connect/lib/public/icons/page_white_find.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_find.png rename to node_modules/connect/lib/public/icons/page_white_find.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_flash.png b/node_modules/connect/lib/public/icons/page_white_flash.png similarity index 100% rename 
from node_modules/express/node_modules/connect/lib/public/icons/page_white_flash.png rename to node_modules/connect/lib/public/icons/page_white_flash.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_freehand.png b/node_modules/connect/lib/public/icons/page_white_freehand.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_freehand.png rename to node_modules/connect/lib/public/icons/page_white_freehand.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_gear.png b/node_modules/connect/lib/public/icons/page_white_gear.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_gear.png rename to node_modules/connect/lib/public/icons/page_white_gear.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_get.png b/node_modules/connect/lib/public/icons/page_white_get.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_get.png rename to node_modules/connect/lib/public/icons/page_white_get.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_go.png b/node_modules/connect/lib/public/icons/page_white_go.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_go.png rename to node_modules/connect/lib/public/icons/page_white_go.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_h.png b/node_modules/connect/lib/public/icons/page_white_h.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_h.png rename to node_modules/connect/lib/public/icons/page_white_h.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_horizontal.png b/node_modules/connect/lib/public/icons/page_white_horizontal.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_horizontal.png rename to node_modules/connect/lib/public/icons/page_white_horizontal.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_key.png b/node_modules/connect/lib/public/icons/page_white_key.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_key.png rename to node_modules/connect/lib/public/icons/page_white_key.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_lightning.png b/node_modules/connect/lib/public/icons/page_white_lightning.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_lightning.png rename to node_modules/connect/lib/public/icons/page_white_lightning.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_link.png b/node_modules/connect/lib/public/icons/page_white_link.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_link.png rename to node_modules/connect/lib/public/icons/page_white_link.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_magnify.png b/node_modules/connect/lib/public/icons/page_white_magnify.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_magnify.png rename to node_modules/connect/lib/public/icons/page_white_magnify.png diff --git 
a/node_modules/express/node_modules/connect/lib/public/icons/page_white_medal.png b/node_modules/connect/lib/public/icons/page_white_medal.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_medal.png rename to node_modules/connect/lib/public/icons/page_white_medal.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_office.png b/node_modules/connect/lib/public/icons/page_white_office.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_office.png rename to node_modules/connect/lib/public/icons/page_white_office.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_paint.png b/node_modules/connect/lib/public/icons/page_white_paint.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_paint.png rename to node_modules/connect/lib/public/icons/page_white_paint.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_paintbrush.png b/node_modules/connect/lib/public/icons/page_white_paintbrush.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_paintbrush.png rename to node_modules/connect/lib/public/icons/page_white_paintbrush.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_paste.png b/node_modules/connect/lib/public/icons/page_white_paste.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_paste.png rename to node_modules/connect/lib/public/icons/page_white_paste.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_php.png b/node_modules/connect/lib/public/icons/page_white_php.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_php.png rename to node_modules/connect/lib/public/icons/page_white_php.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_picture.png b/node_modules/connect/lib/public/icons/page_white_picture.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_picture.png rename to node_modules/connect/lib/public/icons/page_white_picture.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_powerpoint.png b/node_modules/connect/lib/public/icons/page_white_powerpoint.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_powerpoint.png rename to node_modules/connect/lib/public/icons/page_white_powerpoint.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_put.png b/node_modules/connect/lib/public/icons/page_white_put.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_put.png rename to node_modules/connect/lib/public/icons/page_white_put.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_ruby.png b/node_modules/connect/lib/public/icons/page_white_ruby.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_ruby.png rename to node_modules/connect/lib/public/icons/page_white_ruby.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_stack.png b/node_modules/connect/lib/public/icons/page_white_stack.png 
similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_stack.png rename to node_modules/connect/lib/public/icons/page_white_stack.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_star.png b/node_modules/connect/lib/public/icons/page_white_star.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_star.png rename to node_modules/connect/lib/public/icons/page_white_star.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_swoosh.png b/node_modules/connect/lib/public/icons/page_white_swoosh.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_swoosh.png rename to node_modules/connect/lib/public/icons/page_white_swoosh.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_text.png b/node_modules/connect/lib/public/icons/page_white_text.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_text.png rename to node_modules/connect/lib/public/icons/page_white_text.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_text_width.png b/node_modules/connect/lib/public/icons/page_white_text_width.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_text_width.png rename to node_modules/connect/lib/public/icons/page_white_text_width.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_tux.png b/node_modules/connect/lib/public/icons/page_white_tux.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_tux.png rename to node_modules/connect/lib/public/icons/page_white_tux.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_vector.png b/node_modules/connect/lib/public/icons/page_white_vector.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_vector.png rename to node_modules/connect/lib/public/icons/page_white_vector.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_visualstudio.png b/node_modules/connect/lib/public/icons/page_white_visualstudio.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_visualstudio.png rename to node_modules/connect/lib/public/icons/page_white_visualstudio.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_width.png b/node_modules/connect/lib/public/icons/page_white_width.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_width.png rename to node_modules/connect/lib/public/icons/page_white_width.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_word.png b/node_modules/connect/lib/public/icons/page_white_word.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_word.png rename to node_modules/connect/lib/public/icons/page_white_word.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_world.png b/node_modules/connect/lib/public/icons/page_white_world.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_world.png rename to 
node_modules/connect/lib/public/icons/page_white_world.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_wrench.png b/node_modules/connect/lib/public/icons/page_white_wrench.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_wrench.png rename to node_modules/connect/lib/public/icons/page_white_wrench.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_white_zip.png b/node_modules/connect/lib/public/icons/page_white_zip.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_white_zip.png rename to node_modules/connect/lib/public/icons/page_white_zip.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_word.png b/node_modules/connect/lib/public/icons/page_word.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_word.png rename to node_modules/connect/lib/public/icons/page_word.png diff --git a/node_modules/express/node_modules/connect/lib/public/icons/page_world.png b/node_modules/connect/lib/public/icons/page_world.png similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/icons/page_world.png rename to node_modules/connect/lib/public/icons/page_world.png diff --git a/node_modules/express/node_modules/connect/lib/public/style.css b/node_modules/connect/lib/public/style.css similarity index 100% rename from node_modules/express/node_modules/connect/lib/public/style.css rename to node_modules/connect/lib/public/style.css diff --git a/node_modules/express/node_modules/connect/lib/utils.js b/node_modules/connect/lib/utils.js similarity index 100% rename from node_modules/express/node_modules/connect/lib/utils.js rename to node_modules/connect/lib/utils.js diff --git a/node_modules/connect/package.json b/node_modules/connect/package.json new file mode 100644 index 000000000..92702b69e --- /dev/null +++ b/node_modules/connect/package.json @@ -0,0 +1,86 @@ +{ + "_from": "connect@2.12.0", + "_id": "connect@2.12.0", + "_inBundle": false, + "_integrity": "sha1-Mdj6DcrN8ZCNgivSkjvootKn7Zo=", + "_location": "/connect", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "connect@2.12.0", + "name": "connect", + "escapedName": "connect", + "rawSpec": "2.12.0", + "saveSpec": null, + "fetchSpec": "2.12.0" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz", + "_shasum": "31d8fa0dcacdf1908d822bd2923be8a2d2a7ed9a", + "_spec": "connect@2.12.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bugs": { + "url": "https://github.com/senchalabs/connect/issues" + }, + "bundleDependencies": false, + "dependencies": { + "batch": "0.5.0", + "buffer-crc32": "0.2.1", + "bytes": "0.2.1", + "cookie": "0.1.0", + "cookie-signature": "1.0.1", + "debug": ">= 0.7.3 < 1", + "fresh": "0.2.0", + "methods": "0.1.0", + "multiparty": "2.2.0", + "negotiator": "0.3.0", + "pause": "0.0.1", + "qs": "0.6.6", + "raw-body": "1.1.2", + "send": "0.1.4", + "uid2": "0.0.3" + }, + "deprecated": "connect 2.x series is deprecated", + "description": "High performance middleware framework", + "devDependencies": { + "dox": ">= 0.4.4 < 1", + "jade": ">= 0.35.0 < 1", + "mocha": ">= 1.13.0 < 2", + "should": ">= 2.0.2 < 3" 
+ }, + "engines": { + "node": ">= 0.8.0" + }, + "homepage": "https://github.com/senchalabs/connect#readme", + "keywords": [ + "framework", + "web", + "middleware", + "connect", + "rack" + ], + "licenses": [ + { + "type": "MIT", + "url": "https://raw.github.com/senchalabs/connect/master/LICENSE" + } + ], + "main": "index", + "name": "connect", + "repository": { + "type": "git", + "url": "git://github.com/senchalabs/connect.git" + }, + "scripts": { + "test": "make" + }, + "version": "2.12.0" +} diff --git a/node_modules/express/node_modules/connect/node_modules/pause/.npmignore b/node_modules/cookie-signature/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/pause/.npmignore rename to node_modules/cookie-signature/.npmignore diff --git a/node_modules/express/node_modules/cookie-signature/History.md b/node_modules/cookie-signature/History.md similarity index 100% rename from node_modules/express/node_modules/cookie-signature/History.md rename to node_modules/cookie-signature/History.md diff --git a/node_modules/express/node_modules/connect/node_modules/pause/Makefile b/node_modules/cookie-signature/Makefile similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/pause/Makefile rename to node_modules/cookie-signature/Makefile diff --git a/node_modules/express/node_modules/cookie-signature/Readme.md b/node_modules/cookie-signature/Readme.md similarity index 100% rename from node_modules/express/node_modules/cookie-signature/Readme.md rename to node_modules/cookie-signature/Readme.md diff --git a/node_modules/express/node_modules/cookie-signature/index.js b/node_modules/cookie-signature/index.js similarity index 100% rename from node_modules/express/node_modules/cookie-signature/index.js rename to node_modules/cookie-signature/index.js diff --git a/node_modules/cookie-signature/package.json b/node_modules/cookie-signature/package.json new file mode 100644 index 000000000..75506fbc1 --- /dev/null +++ b/node_modules/cookie-signature/package.json @@ -0,0 +1,46 @@ +{ + "_from": "cookie-signature@1.0.1", + "_id": "cookie-signature@1.0.1", + "_inBundle": false, + "_integrity": "sha1-ROByFIrwHm6OJK+/EmkNaK5pjss=", + "_location": "/cookie-signature", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "cookie-signature@1.0.1", + "name": "cookie-signature", + "escapedName": "cookie-signature", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "/connect", + "/express" + ], + "_resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", + "_shasum": "44e072148af01e6e8e24afbf12690d68ae698ecb", + "_spec": "cookie-signature@1.0.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@learnboost.com" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Sign and unsign cookies", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [ + "cookie", + "sign", + "unsign" + ], + "main": "index", + "name": "cookie-signature", + "version": "1.0.1" +} diff --git a/node_modules/express/node_modules/cookie/.npmignore b/node_modules/cookie/.npmignore similarity index 100% rename from node_modules/express/node_modules/cookie/.npmignore rename to node_modules/cookie/.npmignore diff --git a/node_modules/express/node_modules/cookie/.travis.yml b/node_modules/cookie/.travis.yml 
similarity index 100% rename from node_modules/express/node_modules/cookie/.travis.yml rename to node_modules/cookie/.travis.yml diff --git a/node_modules/express/node_modules/cookie/LICENSE b/node_modules/cookie/LICENSE similarity index 100% rename from node_modules/express/node_modules/cookie/LICENSE rename to node_modules/cookie/LICENSE diff --git a/node_modules/express/node_modules/cookie/README.md b/node_modules/cookie/README.md similarity index 100% rename from node_modules/express/node_modules/cookie/README.md rename to node_modules/cookie/README.md diff --git a/node_modules/express/node_modules/cookie/index.js b/node_modules/cookie/index.js similarity index 100% rename from node_modules/express/node_modules/cookie/index.js rename to node_modules/cookie/index.js diff --git a/node_modules/cookie/package.json b/node_modules/cookie/package.json new file mode 100644 index 000000000..c2306d6c1 --- /dev/null +++ b/node_modules/cookie/package.json @@ -0,0 +1,59 @@ +{ + "_from": "cookie@0.1.0", + "_id": "cookie@0.1.0", + "_inBundle": false, + "_integrity": "sha1-kOtGndzpBchm3mh+/EMTHYgB+dA=", + "_location": "/cookie", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "cookie@0.1.0", + "name": "cookie", + "escapedName": "cookie", + "rawSpec": "0.1.0", + "saveSpec": null, + "fetchSpec": "0.1.0" + }, + "_requiredBy": [ + "/connect", + "/express" + ], + "_resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "_shasum": "90eb469ddce905c866de687efc43131d8801f9d0", + "_spec": "cookie@0.1.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "Roman Shtylman", + "email": "shtylman@gmail.com" + }, + "bugs": { + "url": "https://github.com/shtylman/node-cookie/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "cookie parsing and serialization", + "devDependencies": { + "mocha": "1.x.x" + }, + "engines": { + "node": "*" + }, + "homepage": "https://github.com/shtylman/node-cookie#readme", + "keywords": [ + "cookie", + "cookies" + ], + "main": "index.js", + "name": "cookie", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "git://github.com/shtylman/node-cookie.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "0.1.0" +} diff --git a/node_modules/express/node_modules/cookie/test/mocha.opts b/node_modules/cookie/test/mocha.opts similarity index 100% rename from node_modules/express/node_modules/cookie/test/mocha.opts rename to node_modules/cookie/test/mocha.opts diff --git a/node_modules/express/node_modules/cookie/test/parse.js b/node_modules/cookie/test/parse.js similarity index 100% rename from node_modules/express/node_modules/cookie/test/parse.js rename to node_modules/cookie/test/parse.js diff --git a/node_modules/express/node_modules/cookie/test/serialize.js b/node_modules/cookie/test/serialize.js similarity index 100% rename from node_modules/express/node_modules/cookie/test/serialize.js rename to node_modules/cookie/test/serialize.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/LICENSE b/node_modules/core-util-is/LICENSE similarity index 93% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/LICENSE rename to node_modules/core-util-is/LICENSE index a3187cc10..d8d7f9437 100644 --- 
a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/LICENSE +++ b/node_modules/core-util-is/LICENSE @@ -1,4 +1,4 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Copyright Node.js contributors. All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/README.md b/node_modules/core-util-is/README.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/README.md rename to node_modules/core-util-is/README.md diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/float.patch b/node_modules/core-util-is/float.patch similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/float.patch rename to node_modules/core-util-is/float.patch diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/util.js b/node_modules/core-util-is/lib/util.js similarity index 88% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/util.js rename to node_modules/core-util-is/lib/util.js index 007fa1057..ff4c851c0 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/util.js +++ b/node_modules/core-util-is/lib/util.js @@ -21,8 +21,12 @@ // NOTE: These type checking functions intentionally don't use `instanceof` // because it is fragile and can be easily faked with `Object.create()`. 
-function isArray(ar) { - return Array.isArray(ar); + +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); + } + return objectToString(arg) === '[object Array]'; } exports.isArray = isArray; @@ -62,7 +66,7 @@ function isUndefined(arg) { exports.isUndefined = isUndefined; function isRegExp(re) { - return isObject(re) && objectToString(re) === '[object RegExp]'; + return objectToString(re) === '[object RegExp]'; } exports.isRegExp = isRegExp; @@ -72,12 +76,12 @@ function isObject(arg) { exports.isObject = isObject; function isDate(d) { - return isObject(d) && objectToString(d) === '[object Date]'; + return objectToString(d) === '[object Date]'; } exports.isDate = isDate; function isError(e) { - return isObject(e) && objectToString(e) === '[object Error]'; + return (objectToString(e) === '[object Error]' || e instanceof Error); } exports.isError = isError; @@ -96,10 +100,7 @@ function isPrimitive(arg) { } exports.isPrimitive = isPrimitive; -function isBuffer(arg) { - return arg instanceof Buffer; -} -exports.isBuffer = isBuffer; +exports.isBuffer = Buffer.isBuffer; function objectToString(o) { return Object.prototype.toString.call(o); diff --git a/node_modules/core-util-is/package.json b/node_modules/core-util-is/package.json new file mode 100644 index 000000000..1ecbf053a --- /dev/null +++ b/node_modules/core-util-is/package.json @@ -0,0 +1,62 @@ +{ + "_from": "core-util-is@~1.0.0", + "_id": "core-util-is@1.0.2", + "_inBundle": false, + "_integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "_location": "/core-util-is", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "core-util-is@~1.0.0", + "name": "core-util-is", + "escapedName": "core-util-is", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", + "_spec": "core-util-is@~1.0.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/readable-stream", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/core-util-is/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The `util.is*` functions introduced in Node v0.12.", + "devDependencies": { + "tap": "^2.3.0" + }, + "homepage": "https://github.com/isaacs/core-util-is#readme", + "keywords": [ + "util", + "isBuffer", + "isArray", + "isNumber", + "isString", + "isRegExp", + "isThis", + "isThat", + "polyfill" + ], + "license": "MIT", + "main": "lib/util.js", + "name": "core-util-is", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/core-util-is.git" + }, + "scripts": { + "test": "tap test.js" + }, + "version": "1.0.2" +} diff --git a/node_modules/core-util-is/test.js b/node_modules/core-util-is/test.js new file mode 100644 index 000000000..1a490c65a --- /dev/null +++ b/node_modules/core-util-is/test.js @@ -0,0 +1,68 @@ +var assert = require('tap'); + +var t = require('./lib/util'); + +assert.equal(t.isArray([]), true); +assert.equal(t.isArray({}), false); + +assert.equal(t.isBoolean(null), false); +assert.equal(t.isBoolean(true), true); +assert.equal(t.isBoolean(false), true); + +assert.equal(t.isNull(null), true); +assert.equal(t.isNull(undefined), false); +assert.equal(t.isNull(false), false); +assert.equal(t.isNull(), false); + +assert.equal(t.isNullOrUndefined(null), true); +assert.equal(t.isNullOrUndefined(undefined), true); +assert.equal(t.isNullOrUndefined(false), false); +assert.equal(t.isNullOrUndefined(), true); + +assert.equal(t.isNumber(null), false); +assert.equal(t.isNumber('1'), false); +assert.equal(t.isNumber(1), true); + +assert.equal(t.isString(null), false); +assert.equal(t.isString('1'), true); +assert.equal(t.isString(1), false); + +assert.equal(t.isSymbol(null), false); +assert.equal(t.isSymbol('1'), false); +assert.equal(t.isSymbol(1), false); +assert.equal(t.isSymbol(Symbol()), true); + +assert.equal(t.isUndefined(null), false); +assert.equal(t.isUndefined(undefined), true); +assert.equal(t.isUndefined(false), false); +assert.equal(t.isUndefined(), true); + +assert.equal(t.isRegExp(null), false); +assert.equal(t.isRegExp('1'), false); +assert.equal(t.isRegExp(new RegExp()), true); + +assert.equal(t.isObject({}), true); +assert.equal(t.isObject([]), true); +assert.equal(t.isObject(new RegExp()), true); +assert.equal(t.isObject(new Date()), true); + +assert.equal(t.isDate(null), false); +assert.equal(t.isDate('1'), false); +assert.equal(t.isDate(new Date()), true); + +assert.equal(t.isError(null), false); +assert.equal(t.isError({ err: true }), false); +assert.equal(t.isError(new Error()), true); + +assert.equal(t.isFunction(null), false); +assert.equal(t.isFunction({ }), false); +assert.equal(t.isFunction(function() {}), true); + +assert.equal(t.isPrimitive(null), true); +assert.equal(t.isPrimitive(''), true); +assert.equal(t.isPrimitive(0), true); +assert.equal(t.isPrimitive(new Date()), false); + +assert.equal(t.isBuffer(null), false); +assert.equal(t.isBuffer({}), false); +assert.equal(t.isBuffer(new Buffer(0)), true); diff --git a/node_modules/express/node_modules/debug/Readme.md b/node_modules/debug/Readme.md similarity index 91% rename from node_modules/express/node_modules/debug/Readme.md rename to node_modules/debug/Readme.md index c5a34e8b8..8981f8abe 100644 --- a/node_modules/express/node_modules/debug/Readme.md +++ b/node_modules/debug/Readme.md @@ -11,7 +11,7 @@ $ npm install debug ## Usage With `debug` you simply invoke the exported 
function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. A unique color is selected per-function for visibility. - + Example _app.js_: ```js @@ -57,24 +57,23 @@ setInterval(function(){ ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png) - When stderr is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below: - _(NOTE: Debug now uses stderr instead of stdout, so the correct shell command for this example is actually `DEBUG=* node example/worker 2> out &`)_ - + When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below: + ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png) - + ## Conventions - If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". + If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". ## Wildcards - The "*" character may be used as a wildcard. Suppose for example your library has debuggers named "connect:bodyParser", "connect:compress", "connect:session", instead of listing all three with `DEBUG=connect:bodyParser,connect.compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + The `*` character may be used as a wildcard. Suppose for example your library has debuggers named "connect:bodyParser", "connect:compress", "connect:session", instead of listing all three with `DEBUG=connect:bodyParser,connect.compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. You can also exclude specific debuggers by prefixing them with a "-" character. For example, `DEBUG=* -connect:*` would include all debuggers except those starting with "connect:". ## Browser support - Debug works in the browser as well, currently persisted by `localStorage`. For example if you have `worker:a` and `worker:b` as shown below, and wish to debug both type `debug.enable('worker:*')` in the console and refresh the page, this will remain until you disable with `debug.disable()`. + Debug works in the browser as well, currently persisted by `localStorage`. For example if you have `worker:a` and `worker:b` as shown below, and wish to debug both type `debug.enable('worker:*')` in the console and refresh the page, this will remain until you disable with `debug.disable()`. 
```js a = debug('worker:a'); @@ -89,7 +88,7 @@ setInterval(function(){ }, 1200); ``` -## License +## License (The MIT License) diff --git a/node_modules/express/node_modules/debug/debug.js b/node_modules/debug/debug.js similarity index 100% rename from node_modules/express/node_modules/debug/debug.js rename to node_modules/debug/debug.js diff --git a/node_modules/express/node_modules/debug/lib/debug.js b/node_modules/debug/lib/debug.js similarity index 85% rename from node_modules/express/node_modules/debug/lib/debug.js rename to node_modules/debug/lib/debug.js index 3b0a9183d..e7422e68a 100644 --- a/node_modules/express/node_modules/debug/lib/debug.js +++ b/node_modules/debug/lib/debug.js @@ -17,17 +17,6 @@ module.exports = debug; var names = [] , skips = []; -(process.env.DEBUG || '') - .split(/[\s,]+/) - .forEach(function(name){ - name = name.replace('*', '.*?'); - if (name[0] === '-') { - skips.push(new RegExp('^' + name.substr(1) + '$')); - } else { - names.push(new RegExp('^' + name + '$')); - } - }); - /** * Colors. */ @@ -50,7 +39,7 @@ var prevColor = 0; * Is stdout a TTY? Colored output is disabled when `true`. */ -var isatty = tty.isatty(2); +var isatty = tty.isatty(1); /** * Select a color. @@ -119,7 +108,7 @@ function debug(name) { + fmt + '\u001b[3' + c + 'm' + ' +' + humanize(ms) + '\u001b[0m'; - console.error.apply(this, arguments); + console.log.apply(this, arguments); } function plain(fmt) { @@ -127,7 +116,7 @@ function debug(name) { fmt = new Date().toUTCString() + ' ' + name + ' ' + fmt; - console.error.apply(this, arguments); + console.log.apply(this, arguments); } colored.enabled = plain.enabled = true; @@ -145,3 +134,25 @@ function coerce(val) { if (val instanceof Error) return val.stack || val.message; return val; } + +/** + * Enable specified `namespaces` for debugging. + */ + +debug.enable = function(namespaces) { + namespaces.split(/[\s,]+/) + .forEach(function(name){ + name = name.replace('*', '.*?'); + if (name[0] == '-') { + skips.push(new RegExp('^' + name.substr(1) + '$')); + } else { + names.push(new RegExp('^' + name + '$')); + } + }); +}; + +/** + * Enable namespaces listed in `process.env.DEBUG` initially. 
+ */ + +debug.enable(process.env.DEBUG || ''); diff --git a/node_modules/debug/package.json b/node_modules/debug/package.json new file mode 100644 index 000000000..dab0206c7 --- /dev/null +++ b/node_modules/debug/package.json @@ -0,0 +1,67 @@ +{ + "_from": "debug@>= 0.7.3 < 1", + "_id": "debug@0.8.1", + "_inBundle": false, + "_integrity": "sha1-IP9NJvXkIstoobrLu2EDmtjBwTA=", + "_location": "/debug", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "debug@>= 0.7.3 < 1", + "name": "debug", + "escapedName": "debug", + "rawSpec": ">= 0.7.3 < 1", + "saveSpec": null, + "fetchSpec": ">= 0.7.3 < 1" + }, + "_requiredBy": [ + "/connect", + "/express", + "/send" + ], + "_resolved": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "_shasum": "20ff4d26f5e422cb68a1bacbbb61039ad8c1c130", + "_spec": "debug@>= 0.7.3 < 1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "browser": "./debug.js", + "bugs": { + "url": "https://github.com/visionmedia/debug/issues" + }, + "bundleDependencies": false, + "component": { + "scripts": { + "debug/index.js": "debug.js" + } + }, + "dependencies": {}, + "deprecated": false, + "description": "small debugging utility", + "devDependencies": { + "mocha": "*" + }, + "engines": { + "node": "*" + }, + "files": [ + "lib/debug.js", + "debug.js" + ], + "homepage": "https://github.com/visionmedia/debug#readme", + "keywords": [ + "debug", + "log", + "debugger" + ], + "main": "lib/debug.js", + "name": "debug", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" + }, + "version": "0.8.1" +} diff --git a/node_modules/express/benchmarks/run b/node_modules/express/benchmarks/run old mode 100644 new mode 100755 diff --git a/node_modules/express/bin/express b/node_modules/express/bin/express old mode 100644 new mode 100755 diff --git a/node_modules/express/node_modules/buffer-crc32/package.json b/node_modules/express/node_modules/buffer-crc32/package.json deleted file mode 100644 index ed9618bc8..000000000 --- a/node_modules/express/node_modules/buffer-crc32/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "author": { - "name": "Brian J. 
Brennan", - "email": "brianloveswords@gmail.com", - "url": "http://bjb.io" - }, - "name": "buffer-crc32", - "description": "A pure javascript CRC32 algorithm that plays nice with binary data", - "version": "0.2.1", - "contributors": [ - { - "name": "Vladimir Kuznetsov" - } - ], - "homepage": "https://github.com/brianloveswords/buffer-crc32", - "repository": { - "type": "git", - "url": "git://github.com/brianloveswords/buffer-crc32.git" - }, - "main": "index.js", - "scripts": { - "test": "./node_modules/.bin/tap tests/*.test.js" - }, - "dependencies": {}, - "devDependencies": { - "tap": "~0.2.5" - }, - "optionalDependencies": {}, - "engines": { - "node": "*" - }, - "readme": "# buffer-crc32\n\n[![Build Status](https://secure.travis-ci.org/brianloveswords/buffer-crc32.png?branch=master)](http://travis-ci.org/brianloveswords/buffer-crc32)\n\ncrc32 that works with binary data and fancy character sets, outputs\nbuffer, signed or unsigned data and has tests.\n\nDerived from the sample CRC implementation in the PNG specification: http://www.w3.org/TR/PNG/#D-CRCAppendix\n\n# install\n```\nnpm install buffer-crc32\n```\n\n# example\n```js\nvar crc32 = require('buffer-crc32');\n// works with buffers\nvar buf = Buffer([0x00, 0x73, 0x75, 0x70, 0x20, 0x62, 0x72, 0x6f, 0x00])\ncrc32(buf) // -> \n\n// has convenience methods for getting signed or unsigned ints\ncrc32.signed(buf) // -> -1805997238\ncrc32.unsigned(buf) // -> 2488970058\n\n// will cast to buffer if given a string, so you can\n// directly use foreign characters safely\ncrc32('自動販売機') // -> \n\n// and works in append mode too\nvar partialCrc = crc32('hey');\nvar partialCrc = crc32(' ', partialCrc);\nvar partialCrc = crc32('sup', partialCrc);\nvar partialCrc = crc32(' ', partialCrc);\nvar finalCrc = crc32('bros', partialCrc); // -> \n```\n\n# tests\nThis was tested against the output of zlib's crc32 method. You can run\nthe tests with`npm test` (requires tap)\n\n# see also\nhttps://github.com/alexgorbatchev/node-crc, `crc.buffer.crc32` also\nsupports buffer inputs and return unsigned ints (thanks @tjholowaychuk).\n\n# license\nMIT/X11\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/brianloveswords/buffer-crc32/issues" - }, - "_id": "buffer-crc32@0.2.1", - "dist": { - "shasum": "69c3a8d64a2371a06465b1766786725de5896116" - }, - "_from": "buffer-crc32@0.2.1", - "_resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz" -} diff --git a/node_modules/express/node_modules/commander/node_modules/keypress/package.json b/node_modules/express/node_modules/commander/node_modules/keypress/package.json deleted file mode 100644 index c5462052c..000000000 --- a/node_modules/express/node_modules/commander/node_modules/keypress/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "keypress", - "version": "0.1.0", - "description": "Make any Node ReadableStream emit \"keypress\" events", - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://tootallnate.net" - }, - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/keypress.git" - }, - "keywords": [ - "keypress", - "readline", - "core" - ], - "license": "MIT", - "readme": "keypress\n========\n### Make any Node ReadableStream emit \"keypress\" events\n\n\nPrevious to Node `v0.8.x`, there was an undocumented `\"keypress\"` event that\n`process.stdin` would emit when it was a TTY. 
Some people discovered this hidden\ngem, and started using it in their own code.\n\nNow in Node `v0.8.x`, this `\"keypress\"` event does not get emitted by default,\nbut rather only when it is being used in conjuction with the `readline` (or by\nextension, the `repl`) module.\n\nThis module is the exact logic from the node `v0.8.x` releases ripped out into its\nown module.\n\n__Bonus:__ Now with mouse support!\n\nInstallation\n------------\n\nInstall with `npm`:\n\n``` bash\n$ npm install keypress\n```\n\nOr add it to the `\"dependencies\"` section of your _package.json_ file.\n\n\nExample\n-------\n\n#### Listening for \"keypress\" events\n\n``` js\nvar keypress = require('keypress');\n\n// make `process.stdin` begin emitting \"keypress\" events\nkeypress(process.stdin);\n\n// listen for the \"keypress\" event\nprocess.stdin.on('keypress', function (ch, key) {\n console.log('got \"keypress\"', key);\n if (key && key.ctrl && key.name == 'c') {\n process.stdin.pause();\n }\n});\n\nprocess.stdin.setRawMode(true);\nprocess.stdin.resume();\n```\n\n#### Listening for \"mousepress\" events\n\n``` js\nvar keypress = require('keypress');\n\n// make `process.stdin` begin emitting \"mousepress\" (and \"keypress\") events\nkeypress(process.stdin);\n\n// you must enable the mouse events before they will begin firing\nkeypress.enableMouse(process.stdout);\n\nprocess.stdin.on('mousepress', function (info) {\n console.log('got \"mousepress\" event at %d x %d', info.x, info.y);\n});\n\nprocess.on('exit', function () {\n // disable mouse on exit, so that the state\n // is back to normal for the terminal\n keypress.disableMouse(process.stdout);\n});\n```\n\n\nLicense\n-------\n\n(The MIT License)\n\nCopyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/TooTallNate/keypress/issues" - }, - "homepage": "https://github.com/TooTallNate/keypress", - "_id": "keypress@0.1.0", - "_from": "keypress@0.1.x" -} diff --git a/node_modules/express/node_modules/commander/package.json b/node_modules/express/node_modules/commander/package.json deleted file mode 100644 index e156bc171..000000000 --- a/node_modules/express/node_modules/commander/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "commander", - "version": "1.3.2", - "description": "the complete solution for node.js command-line programs", - "keywords": [ - "command", - "option", - "parser", - "prompt", - "stdin" - ], - "author": { - "name": "TJ 
Holowaychuk", - "email": "tj@vision-media.ca" - }, - "repository": { - "type": "git", - "url": "https://github.com/visionmedia/commander.js.git" - }, - "dependencies": { - "keypress": "0.1.x" - }, - "devDependencies": { - "should": ">= 0.0.1" - }, - "scripts": { - "test": "make test" - }, - "main": "index", - "engines": { - "node": ">= 0.6.x" - }, - "readme": "# Commander.js\n\n The complete solution for [node.js](http://nodejs.org) command-line interfaces, inspired by Ruby's [commander](https://github.com/visionmedia/commander).\n\n [![Build Status](https://secure.travis-ci.org/visionmedia/commander.js.png)](http://travis-ci.org/visionmedia/commander.js)\n\n## Installation\n\n $ npm install commander\n\n## Option parsing\n\n Options with commander are defined with the `.option()` method, also serving as documentation for the options. The example below parses args and options from `process.argv`, leaving remaining args as the `program.args` array which were not consumed by options.\n\n```js\n#!/usr/bin/env node\n\n/**\n * Module dependencies.\n */\n\nvar program = require('commander');\n\nprogram\n .version('0.0.1')\n .option('-p, --peppers', 'Add peppers')\n .option('-P, --pineapple', 'Add pineapple')\n .option('-b, --bbq', 'Add bbq sauce')\n .option('-c, --cheese [type]', 'Add the specified type of cheese [marble]', 'marble')\n .parse(process.argv);\n\nconsole.log('you ordered a pizza with:');\nif (program.peppers) console.log(' - peppers');\nif (program.pineapple) console.log(' - pineapple');\nif (program.bbq) console.log(' - bbq');\nconsole.log(' - %s cheese', program.cheese);\n```\n\n Short flags may be passed as a single arg, for example `-abc` is equivalent to `-a -b -c`. Multi-word options such as \"--template-engine\" are camel-cased, becoming `program.templateEngine` etc.\n\n## Automated --help\n\n The help information is auto-generated based on the information commander already knows about your program, so the following `--help` info is for free:\n\n``` \n $ ./examples/pizza --help\n\n Usage: pizza [options]\n\n Options:\n\n -V, --version output the version number\n -p, --peppers Add peppers\n -P, --pineapple Add pineapple\n -b, --bbq Add bbq sauce\n -c, --cheese Add the specified type of cheese [marble]\n -h, --help output usage information\n\n```\n\n## Coercion\n\n```js\nfunction range(val) {\n return val.split('..').map(Number);\n}\n\nfunction list(val) {\n return val.split(',');\n}\n\nprogram\n .version('0.0.1')\n .usage('[options] ')\n .option('-i, --integer ', 'An integer argument', parseInt)\n .option('-f, --float ', 'A float argument', parseFloat)\n .option('-r, --range ..', 'A range', range)\n .option('-l, --list ', 'A list', list)\n .option('-o, --optional [value]', 'An optional value')\n .parse(process.argv);\n\nconsole.log(' int: %j', program.integer);\nconsole.log(' float: %j', program.float);\nconsole.log(' optional: %j', program.optional);\nprogram.range = program.range || [];\nconsole.log(' range: %j..%j', program.range[0], program.range[1]);\nconsole.log(' list: %j', program.list);\nconsole.log(' args: %j', program.args);\n```\n\n## Custom help\n\n You can display arbitrary `-h, --help` information\n by listening for \"--help\". 
Commander will automatically\n exit once you are done so that the remainder of your program\n does not execute causing undesired behaviours, for example\n in the following executable \"stuff\" will not output when\n `--help` is used.\n\n```js\n#!/usr/bin/env node\n\n/**\n * Module dependencies.\n */\n\nvar program = require('../');\n\nfunction list(val) {\n return val.split(',').map(Number);\n}\n\nprogram\n .version('0.0.1')\n .option('-f, --foo', 'enable some foo')\n .option('-b, --bar', 'enable some bar')\n .option('-B, --baz', 'enable some baz');\n\n// must be before .parse() since\n// node's emit() is immediate\n\nprogram.on('--help', function(){\n console.log(' Examples:');\n console.log('');\n console.log(' $ custom-help --help');\n console.log(' $ custom-help -h');\n console.log('');\n});\n\nprogram.parse(process.argv);\n\nconsole.log('stuff');\n```\n\nyielding the following help output:\n\n```\n\nUsage: custom-help [options]\n\nOptions:\n\n -h, --help output usage information\n -V, --version output the version number\n -f, --foo enable some foo\n -b, --bar enable some bar\n -B, --baz enable some baz\n\nExamples:\n\n $ custom-help --help\n $ custom-help -h\n\n```\n\n## .prompt(msg, fn)\n\n Single-line prompt:\n\n```js\nprogram.prompt('name: ', function(name){\n console.log('hi %s', name);\n});\n```\n\n Multi-line prompt:\n\n```js\nprogram.prompt('description:', function(name){\n console.log('hi %s', name);\n});\n```\n\n Coercion:\n\n```js\nprogram.prompt('Age: ', Number, function(age){\n console.log('age: %j', age);\n});\n```\n\n```js\nprogram.prompt('Birthdate: ', Date, function(date){\n console.log('date: %s', date);\n});\n```\n\n```js\nprogram.prompt('Email: ', /^.+@.+\\..+$/, function(email){\n console.log('email: %j', email);\n});\n```\n\n## .password(msg[, mask], fn)\n\nPrompt for password without echoing:\n\n```js\nprogram.password('Password: ', function(pass){\n console.log('got \"%s\"', pass);\n process.stdin.destroy();\n});\n```\n\nPrompt for password with mask char \"*\":\n\n```js\nprogram.password('Password: ', '*', function(pass){\n console.log('got \"%s\"', pass);\n process.stdin.destroy();\n});\n```\n\n## .confirm(msg, fn)\n\n Confirm with the given `msg`:\n\n```js\nprogram.confirm('continue? 
', function(ok){\n console.log(' got %j', ok);\n});\n```\n\n## .choose(list, fn)\n\n Let the user choose from a `list`:\n\n```js\nvar list = ['tobi', 'loki', 'jane', 'manny', 'luna'];\n\nconsole.log('Choose the coolest pet:');\nprogram.choose(list, function(i){\n console.log('you chose %d \"%s\"', i, list[i]);\n});\n```\n\n## .outputHelp()\n\n Output help information without exiting.\n\n## .help()\n\n Output help information and exit immediately.\n\n## Links\n\n - [API documentation](http://visionmedia.github.com/commander.js/)\n - [ascii tables](https://github.com/LearnBoost/cli-table)\n - [progress bars](https://github.com/visionmedia/node-progress)\n - [more progress bars](https://github.com/substack/node-multimeter)\n - [examples](https://github.com/visionmedia/commander.js/tree/master/examples)\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/commander.js/issues" - }, - "homepage": "https://github.com/visionmedia/commander.js", - "_id": "commander@1.3.2", - "dist": { - "shasum": "a90f55ff9e0fa77d38694e2f5e906655da13a655" - }, - "_from": "commander@1.3.2", - "_resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/batch/package.json b/node_modules/express/node_modules/connect/node_modules/batch/package.json deleted file mode 100644 index 27bdc188a..000000000 --- a/node_modules/express/node_modules/connect/node_modules/batch/package.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "name": "batch", - "version": "0.5.0", - "description": "Simple async batch", - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca" - }, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "main": "index", - "readme": "\n# batch\n\n Simple async batch with concurrency control and progress reporting.\n\n## Installation\n\n```\n$ npm install batch\n```\n\n## API\n\n```js\nvar Batch = require('batch')\n , batch = new Batch;\n\nbatch.concurrency(4);\n\nids.forEach(function(id){\n batch.push(function(done){\n User.get(id, done);\n });\n});\n\nbatch.on('progress', function(e){\n\n});\n\nbatch.end(function(err, users){\n\n});\n```\n\n### Progress events\n\n Contain the \"job\" index, response value, duration information, and completion data.\n\n```js\n{ index: 1,\n value: 'bar',\n pending: 2,\n total: 3,\n complete: 2,\n percent: 66,\n start: Thu Oct 04 
2012 12:25:53 GMT-0700 (PDT),\n end: Thu Oct 04 2012 12:25:53 GMT-0700 (PDT),\n duration: 0 }\n```\n\n## License\n\n(The MIT License)\n\nCopyright (c) 2013 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "Readme.md", - "_id": "batch@0.5.0", - "dist": { - "shasum": "e3480edd5ad84f92cf61240bfc7a6822506275ab" - }, - "_from": "batch@0.5.0", - "_resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/bytes/package.json b/node_modules/express/node_modules/connect/node_modules/bytes/package.json deleted file mode 100644 index 83230954e..000000000 --- a/node_modules/express/node_modules/connect/node_modules/bytes/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "bytes", - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca", - "url": "http://tjholowaychuk.com" - }, - "description": "byte size string parser / serializer", - "version": "0.2.1", - "main": "index.js", - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "component": { - "scripts": { - "bytes/index.js": "index.js" - } - }, - "readme": "# node-bytes\n\n Byte string parser / formatter.\n\n## Example:\n\n```js\nbytes('1kb')\n// => 1024\n\nbytes('2mb')\n// => 2097152\n\nbytes('1gb')\n// => 1073741824\n\nbytes(1073741824)\n// => 1gb\n```\n\n## Installation\n\n```\n$ npm install bytes\n$ component install visionmedia/bytes.js\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 
CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "Readme.md", - "_id": "bytes@0.2.1", - "dist": { - "shasum": "13114f18ead29c4ada0a4e6e383feb87a526fe40" - }, - "_from": "bytes@0.2.1", - "_resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/README.md b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/README.md deleted file mode 100644 index be976683e..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/README.md +++ /dev/null @@ -1,768 +0,0 @@ -# readable-stream - -A new class of streams for Node.js - -This module provides the new Stream base classes introduced in Node -v0.10, for use in Node v0.8. You can use it to have programs that -have to work with node v0.8, while being forward-compatible for v0.10 -and beyond. When you drop support for v0.8, you can remove this -module, and only use the native streams. - -This is almost exactly the same codebase as appears in Node v0.10. -However: - -1. The exported object is actually the Readable class. Decorating the - native `stream` module would be global pollution. -2. In v0.10, you can safely use `base64` as an argument to - `setEncoding` in Readable streams. However, in v0.8, the - StringDecoder class has no `end()` method, which is problematic for - Base64. So, don't use that, because it'll break and be weird. - -Other than that, the API is the same as `require('stream')` in v0.10, -so the API docs are reproduced below. - ----------- - - Stability: 2 - Unstable - -A stream is an abstract interface implemented by various objects in -Node. For example a request to an HTTP server is a stream, as is -stdout. Streams are readable, writable, or both. All streams are -instances of [EventEmitter][] - -You can load the Stream base classes by doing `require('stream')`. -There are base classes provided for Readable streams, Writable -streams, Duplex streams, and Transform streams. - -## Compatibility - -In earlier versions of Node, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for you to call the `read()` method, `'data'` - events would start emitting immediately. If you needed to do some - I/O to decide how to handle data, then you had to store the chunks - in some kind of buffer so that they would not be lost. -* The `pause()` method was advisory, rather than guaranteed. This - meant that you still had to be prepared to receive `'data'` events - even when the stream was in a paused state. - -In Node v0.10, the Readable class described below was added. For -backwards compatibility with older Node programs, Readable streams -switch into "old mode" when a `'data'` event handler is added, or when -the `pause()` or `resume()` methods are called. The effect is that, -even if you are not using the new `read()` method and `'readable'` -event, you no longer have to worry about losing `'data'` chunks. - -Most programs will continue to function normally. However, this -introduces an edge case in the following conditions: - -* No `'data'` event handler is added. -* The `pause()` and `resume()` methods are never called. - -For example, consider the following code: - -```javascript -// WARNING! BROKEN! 
-net.createServer(function(socket) { - - // we add an 'end' method, but never consume the data - socket.on('end', function() { - // It will never get here. - socket.end('I got your message (but didnt read it)\n'); - }); - -}).listen(1337); -``` - -In versions of node prior to v0.10, the incoming message data would be -simply discarded. However, in Node v0.10 and beyond, the socket will -remain paused forever. - -The workaround in this situation is to call the `resume()` method to -trigger "old mode" behavior: - -```javascript -// Workaround -net.createServer(function(socket) { - - socket.on('end', function() { - socket.end('I got your message (but didnt read it)\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into old-mode, pre-v0.10 -style streams can be wrapped in a Readable class using the `wrap()` -method. - -## Class: stream.Readable - - - -A `Readable Stream` has the following methods, members, and events. - -Note that `stream.Readable` is an abstract class designed to be -extended with an underlying implementation of the `_read(size)` -method. (See below.) - -### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Default=16kb - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Default=null - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that stream.read(n) returns - a single value instead of a Buffer of size n - -In classes that extend the Readable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### readable.\_read(size) - -* `size` {Number} Number of bytes to read asynchronously - -Note: **This function should NOT be called directly.** It should be -implemented by child classes, and called by the internal Readable -class methods only. - -All Readable stream implementations must provide a `_read` method -to fetch data from the underlying resource. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -When data is available, put it into the read queue by calling -`readable.push(chunk)`. If `push` returns false, then you should stop -reading. When `_read` is called again, you should start pushing more -data. - -The `size` argument is advisory. Implementations where a "read" is a -single call that returns data can use this to know how much data to -fetch. Implementations where that is not relevant, such as TCP or -TLS, may ignore this argument, and simply provide data whenever it -becomes available. There is no need, for example to "wait" until -`size` bytes are available before calling `stream.push(chunk)`. - -### readable.push(chunk) - -* `chunk` {Buffer | null | String} Chunk of data to push into the read queue -* return {Boolean} Whether or not more pushes should be performed - -Note: **This function should be called by Readable implementors, NOT -by consumers of Readable subclasses.** The `_read()` function will not -be called again until at least one `push(chunk)` call is made. 
If no -data is available, then you MAY call `push('')` (an empty string) to -allow a future `_read` call, without adding any data to the queue. - -The `Readable` class works by putting data into a read queue to be -pulled out later by calling the `read()` method when the `'readable'` -event fires. - -The `push()` method will explicitly insert some data into the read -queue. If it is called with `null` then it will signal the end of the -data. - -In some cases, you may be wrapping a lower-level source which has some -sort of pause/resume mechanism, and a data callback. In those cases, -you could wrap the low-level source object by doing something like -this: - -```javascript -// source is an object with readStop() and readStart() methods, -// and an `ondata` member that gets called when it has data, and -// an `onend` member that gets called when the data is over. - -var stream = new Readable(); - -source.ondata = function(chunk) { - // if push() returns false, then we need to stop reading from source - if (!stream.push(chunk)) - source.readStop(); -}; - -source.onend = function() { - stream.push(null); -}; - -// _read will be called when the stream wants to pull more data in -// the advisory size argument is ignored in this case. -stream._read = function(n) { - source.readStart(); -}; -``` - -### readable.unshift(chunk) - -* `chunk` {Buffer | null | String} Chunk of data to unshift onto the read queue -* return {Boolean} Whether or not more pushes should be performed - -This is the corollary of `readable.push(chunk)`. Rather than putting -the data at the *end* of the read queue, it puts it at the *front* of -the read queue. - -This is useful in certain use-cases where a stream is being consumed -by a parser, which needs to "un-consume" some data that it has -optimistically pulled out of the source. - -```javascript -// A parser for a simple data protocol. -// The "header" is a JSON object, followed by 2 \n characters, and -// then a message body. -// -// Note: This can be done more simply as a Transform stream. See below. - -function SimpleProtocol(source, options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Readable.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - - // source is a readable stream, such as a socket or file - this._source = source; - - var self = this; - source.on('end', function() { - self.push(null); - }); - - // give it a kick whenever the source is readable - // read(0) will not consume any bytes - source.on('readable', function() { - self.read(0); - }); - - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype = Object.create( - Readable.prototype, { constructor: { value: SimpleProtocol }}); - -SimpleProtocol.prototype._read = function(n) { - if (!this._inBody) { - var chunk = this._source.read(); - - // if the source doesn't have data, we don't have data yet. - if (chunk === null) - return this.push(''); - - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. 
- this._rawHeader.push(chunk); - this.push(''); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // now, because we got some extra data, unshift the rest - // back into the read queue so that our consumer will see it. - var b = chunk.slice(split); - this.unshift(b); - - // and let them know that we are done parsing the header. - this.emit('header', this.header); - } - } else { - // from there on, just provide the data to our consumer. - // careful not to push(null), since that would indicate EOF. - var chunk = this._source.read(); - if (chunk) this.push(chunk); - } -}; - -// Usage: -var parser = new SimpleProtocol(source); -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### readable.wrap(stream) - -* `stream` {Stream} An "old style" readable stream - -If you are using an older Node library that emits `'data'` events and -has a `pause()` method that is advisory only, then you can use the -`wrap()` method to create a Readable stream that uses the old stream -as its data source. - -For example: - -```javascript -var OldReader = require('./old-api-module.js').OldReader; -var oreader = new OldReader; -var Readable = require('stream').Readable; -var myReader = new Readable().wrap(oreader); - -myReader.on('readable', function() { - myReader.read(); // etc. -}); -``` - -### Event: 'readable' - -When there is data ready to be consumed, this event will fire. - -When this event emits, call the `read()` method to consume the data. - -### Event: 'end' - -Emitted when the stream has received an EOF (FIN in TCP terminology). -Indicates that no more `'data'` events will happen. If the stream is -also writable, it may be possible to continue writing. - -### Event: 'data' - -The `'data'` event emits either a `Buffer` (by default) or a string if -`setEncoding()` was used. - -Note that adding a `'data'` event listener will switch the Readable -stream into "old mode", where data is emitted as soon as it is -available, rather than waiting for you to call `read()` to consume it. - -### Event: 'error' - -Emitted if there was an error receiving data. - -### Event: 'close' - -Emitted when the underlying resource (for example, the backing file -descriptor) has been closed. Not all streams will emit this. - -### readable.setEncoding(encoding) - -Makes the `'data'` event emit a string instead of a `Buffer`. `encoding` -can be `'utf8'`, `'utf16le'` (`'ucs2'`), `'ascii'`, or `'hex'`. - -The encoding can also be set by specifying an `encoding` field to the -constructor. - -### readable.read([size]) - -* `size` {Number | null} Optional number of bytes to read. -* Return: {Buffer | String | null} - -Note: **This function SHOULD be called by Readable stream users.** - -Call this method to consume data once the `'readable'` event is -emitted. - -The `size` argument will set a minimum number of bytes that you are -interested in. If not set, then the entire content of the internal -buffer is returned. - -If there is no data to consume, or if there are fewer bytes in the -internal buffer than the `size` argument, then `null` is returned, and -a future `'readable'` event will be emitted when more is available. 
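For example, a minimal consumption sketch (using `process.stdin` as the Readable source) drains the internal buffer each time `'readable'` fires, stopping once `read()` returns `null`:

```javascript
// Minimal sketch: consume a Readable by draining it on each 'readable'
// event; read() returns null when the internal buffer is empty.
process.stdin.on('readable', function() {
  var chunk;
  while (null !== (chunk = process.stdin.read())) {
    console.log('got %d bytes of data', chunk.length);
  }
});

process.stdin.on('end', function() {
  console.log('no more data');
});
```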
- -Calling `stream.read(0)` will always return `null`, and will trigger a -refresh of the internal buffer, but otherwise be a no-op. - -### readable.pipe(destination, [options]) - -* `destination` {Writable Stream} -* `options` {Object} Optional - * `end` {Boolean} Default=true - -Connects this readable stream to `destination` WriteStream. Incoming -data on this stream gets written to `destination`. Properly manages -back-pressure so that a slow destination will not be overwhelmed by a -fast readable stream. - -This function returns the `destination` stream. - -For example, emulating the Unix `cat` command: - - process.stdin.pipe(process.stdout); - -By default `end()` is called on the destination when the source stream -emits `end`, so that `destination` is no longer writable. Pass `{ end: -false }` as `options` to keep the destination stream open. - -This keeps `writer` open so that "Goodbye" can be written at the -end. - - reader.pipe(writer, { end: false }); - reader.on("end", function() { - writer.end("Goodbye\n"); - }); - -Note that `process.stderr` and `process.stdout` are never closed until -the process exits, regardless of the specified options. - -### readable.unpipe([destination]) - -* `destination` {Writable Stream} Optional - -Undo a previously established `pipe()`. If no destination is -provided, then all previously established pipes are removed. - -### readable.pause() - -Switches the readable stream into "old mode", where data is emitted -using a `'data'` event rather than being buffered for consumption via -the `read()` method. - -Ceases the flow of data. No `'data'` events are emitted while the -stream is in a paused state. - -### readable.resume() - -Switches the readable stream into "old mode", where data is emitted -using a `'data'` event rather than being buffered for consumption via -the `read()` method. - -Resumes the incoming `'data'` events after a `pause()`. - - -## Class: stream.Writable - - - -A `Writable` Stream has the following methods, members, and events. - -Note that `stream.Writable` is an abstract class designed to be -extended with an underlying implementation of the -`_write(chunk, encoding, cb)` method. (See below.) - -### new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when `write()` starts - returning false. Default=16kb - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to `_write()`. Default=true - -In classes that extend the Writable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer | String} The chunk to be written. Will always - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. Ignore chunk is a buffer. Note that chunk will - **always** be a buffer unless the `decodeStrings` option is - explicitly set to `false`. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -All Writable stream implementations must provide a `_write` method to -send data to the underlying resource. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Writable -class methods only. 
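A minimal sketch of such an implementation (the `MemoryWriter` name and its in-memory buffering are illustrative assumptions, not part of any API):

```javascript
// Illustrative only: a hypothetical Writable subclass that collects
// every chunk it receives into an in-memory array.
var Writable = require('stream').Writable;

function MemoryWriter(options) {
  Writable.call(this, options);
  this.chunks = [];
}

MemoryWriter.prototype = Object.create(
  Writable.prototype, { constructor: { value: MemoryWriter }});

MemoryWriter.prototype._write = function(chunk, encoding, callback) {
  // chunk is a Buffer unless the decodeStrings option was set to false
  this.chunks.push(chunk);
  callback(); // signal that this chunk has been fully handled
};
```

Piping any Readable into `new MemoryWriter()` would then accumulate its data in `chunks`.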
- -Call the callback using the standard `callback(error)` pattern to -signal that the write completed successfully or with an error. - -If the `decodeStrings` flag is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the sort of string that it is. This is to support -implementations that have an optimized handling for certain string -data encodings. If you do not explicitly set the `decodeStrings` -option to `false`, then you can safely ignore the `encoding` argument, -and assume that `chunk` will always be a Buffer. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - - -### writable.write(chunk, [encoding], [callback]) - -* `chunk` {Buffer | String} Data to be written -* `encoding` {String} Optional. If `chunk` is a string, then encoding - defaults to `'utf8'` -* `callback` {Function} Optional. Called when this chunk is - successfully written. -* Returns {Boolean} - -Writes `chunk` to the stream. Returns `true` if the data has been -flushed to the underlying resource. Returns `false` to indicate that -the buffer is full, and the data will be sent out in the future. The -`'drain'` event will indicate when the buffer is empty again. - -The specifics of when `write()` will return false, is determined by -the `highWaterMark` option provided to the constructor. - -### writable.end([chunk], [encoding], [callback]) - -* `chunk` {Buffer | String} Optional final data to be written -* `encoding` {String} Optional. If `chunk` is a string, then encoding - defaults to `'utf8'` -* `callback` {Function} Optional. Called when the final chunk is - successfully written. - -Call this method to signal the end of the data being written to the -stream. - -### Event: 'drain' - -Emitted when the stream's write queue empties and it's safe to write -without buffering again. Listen for it when `stream.write()` returns -`false`. - -### Event: 'close' - -Emitted when the underlying resource (for example, the backing file -descriptor) has been closed. Not all streams will emit this. - -### Event: 'finish' - -When `end()` is called and there are no more chunks to write, this -event is emitted. - -### Event: 'pipe' - -* `source` {Readable Stream} - -Emitted when the stream is passed to a readable stream's pipe method. - -### Event 'unpipe' - -* `source` {Readable Stream} - -Emitted when a previously established `pipe()` is removed using the -source Readable stream's `unpipe()` method. - -## Class: stream.Duplex - - - -A "duplex" stream is one that is both Readable and Writable, such as a -TCP socket connection. - -Note that `stream.Duplex` is an abstract class designed to be -extended with an underlying implementation of the `_read(size)` -and `_write(chunk, encoding, callback)` methods as you would with a Readable or -Writable stream class. - -Since JavaScript doesn't have multiple prototypal inheritance, this -class prototypally inherits from Readable, and then parasitically from -Writable. It is thus up to the user to implement both the lowlevel -`_read(n)` method as well as the lowlevel `_write(chunk, encoding, cb)` method -on extension duplex classes. - -### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Default=true. 
If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - -In classes that extend the Duplex class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -## Class: stream.Transform - -A "transform" stream is a duplex stream where the output is causally -connected in some way to the input, such as a zlib stream or a crypto -stream. - -There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A zlib stream will either produce -much smaller or much larger than its input. - -Rather than implement the `_read()` and `_write()` methods, Transform -classes must implement the `_transform()` method, and may optionally -also implement the `_flush()` method. (See below.) - -### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. - -In classes that extend the Transform class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer | String} The chunk to be transformed. Will always - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. (Ignore if `decodeStrings` chunk is a buffer.) -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Transform -class methods only. - -All Transform stream implementations must provide a `_transform` -method to accept input and produce output. - -`_transform` should do whatever has to be done in this specific -Transform class, to handle the bytes being written, and pass them off -to the readable portion of the interface. Do asynchronous I/O, -process things, and so on. - -Call `transform.push(outputChunk)` 0 or more times to generate output -from this input chunk, depending on how much data you want to output -as a result of this chunk. - -Call the callback function only when the current chunk is completely -consumed. Note that there may or may not be output as a result of any -particular input chunk. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -### transform.\_flush(callback) - -* `callback` {Function} Call this function (optionally with an error - argument) when you are done flushing any remaining data. - -Note: **This function MUST NOT be called directly.** It MAY be implemented -by child classes, and if so, will be called by the internal Transform -class methods only. - -In some cases, your transform operation may need to emit a bit more -data at the end of the stream. For example, a `Zlib` compression -stream will store up some internal state so that it can optimally -compress the output. At the end, however, it needs to do the best it -can with what is left, so that the data will be complete. 
- -In those cases, you can implement a `_flush` method, which will be -called at the very end, after all the written data is consumed, but -before emitting `end` to signal the end of the readable side. Just -like with `_transform`, call `transform.push(chunk)` zero or more -times, as appropriate, and call `callback` when the flush operation is -complete. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -### Example: `SimpleProtocol` parser - -The example above of a simple protocol parser can be implemented much -more simply by using the higher level `Transform` stream class. - -In this example, rather than providing the input as an argument, it -would be piped into the parser, which is a more idiomatic Node stream -approach. - -```javascript -function SimpleProtocol(options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Transform.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype = Object.create( - Transform.prototype, { constructor: { value: SimpleProtocol }}); - -SimpleProtocol.prototype._transform = function(chunk, encoding, done) { - if (!this._inBody) { - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. - this._rawHeader.push(chunk); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // and let them know that we are done parsing the header. - this.emit('header', this.header); - - // now, because we got some extra data, emit this first. - this.push(b); - } - } else { - // from there on, just provide the data to our consumer as-is. - this.push(b); - } - done(); -}; - -var parser = new SimpleProtocol(); -source.pipe(parser) - -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - - -## Class: stream.PassThrough - -This is a trivial implementation of a `Transform` stream that simply -passes the input bytes across to the output. Its purpose is mainly -for examples and testing, but there are occasionally use cases where -it can come in handy. - - -[EventEmitter]: events.html#events_class_events_eventemitter diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/lib/util.js deleted file mode 100644 index 9074e8ebc..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/lib/util.js +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// NOTE: These type checking functions intentionally don't use `instanceof` -// because it is fragile and can be easily faked with `Object.create()`. -function isArray(ar) { - return Array.isArray(ar); -} -exports.isArray = isArray; - -function isBoolean(arg) { - return typeof arg === 'boolean'; -} -exports.isBoolean = isBoolean; - -function isNull(arg) { - return arg === null; -} -exports.isNull = isNull; - -function isNullOrUndefined(arg) { - return arg == null; -} -exports.isNullOrUndefined = isNullOrUndefined; - -function isNumber(arg) { - return typeof arg === 'number'; -} -exports.isNumber = isNumber; - -function isString(arg) { - return typeof arg === 'string'; -} -exports.isString = isString; - -function isSymbol(arg) { - return typeof arg === 'symbol'; -} -exports.isSymbol = isSymbol; - -function isUndefined(arg) { - return arg === void 0; -} -exports.isUndefined = isUndefined; - -function isRegExp(re) { - return isObject(re) && objectToString(re) === '[object RegExp]'; -} -exports.isRegExp = isRegExp; - -function isObject(arg) { - return typeof arg === 'object' && arg !== null; -} -exports.isObject = isObject; - -function isDate(d) { - return isObject(d) && objectToString(d) === '[object Date]'; -} -exports.isDate = isDate; - -function isError(e) { - return isObject(e) && - (objectToString(e) === '[object Error]' || e instanceof Error); -} -exports.isError = isError; - -function isFunction(arg) { - return typeof arg === 'function'; -} -exports.isFunction = isFunction; - -function isPrimitive(arg) { - return arg === null || - typeof arg === 'boolean' || - typeof arg === 'number' || - typeof arg === 'string' || - typeof arg === 'symbol' || // ES6 symbol - typeof arg === 'undefined'; -} -exports.isPrimitive = isPrimitive; - -function isBuffer(arg) { - return Buffer.isBuffer(arg); -} -exports.isBuffer = isBuffer; - -function objectToString(o) { - return Object.prototype.toString.call(o); -} \ No newline at end of file diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/package.json b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/package.json deleted file mode 100644 index cb9aa927d..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/core-util-is/package.json +++ /dev/null @@ 
-1,35 +0,0 @@ -{ - "name": "core-util-is", - "version": "1.0.1", - "description": "The `util.is*` functions introduced in Node v0.12.", - "main": "lib/util.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/core-util-is" - }, - "keywords": [ - "util", - "isBuffer", - "isArray", - "isNumber", - "isString", - "isRegExp", - "isThis", - "isThat", - "polyfill" - ], - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "license": "MIT", - "bugs": { - "url": "https://github.com/isaacs/core-util-is/issues" - }, - "readme": "# core-util-is\n\nThe `util.is*` functions introduced in Node v0.12.\n", - "readmeFilename": "README.md", - "homepage": "https://github.com/isaacs/core-util-is", - "_id": "core-util-is@1.0.1", - "_from": "core-util-is@~1.0.0" -} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/README.md b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/README.md deleted file mode 100644 index dc6fccecc..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# debuglog - backport of util.debuglog() from node v0.11 - -To facilitate using the `util.debuglog()` function that will be available when -node v0.12 is released now, this is a copy extracted from the source. - -## require('debuglog') - -Return `util.debuglog`, if it exists, otherwise it will return an internal copy -of the implementation from node v0.11. - -## debuglog(section) - -* `section` {String} The section of the program to be debugged -* Returns: {Function} The logging function - -This is used to create a function which conditionally writes to stderr -based on the existence of a `NODE_DEBUG` environment variable. If the -`section` name appears in that environment variable, then the returned -function will be similar to `console.error()`. If not, then the -returned function is a no-op. - -For example: - -```javascript -var debuglog = util.debuglog('foo'); - -var bar = 123; -debuglog('hello from foo [%d]', bar); -``` - -If this program is run with `NODE_DEBUG=foo` in the environment, then -it will output something like: - - FOO 3245: hello from foo [123] - -where `3245` is the process id. If it is not run with that -environment variable set, then it will not print anything. - -You may separate multiple `NODE_DEBUG` environment variables with a -comma. For example, `NODE_DEBUG=fs,net,tls`. 
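Since the module exports `util.debuglog` when it exists (and the backported copy otherwise), a minimal usage sketch with this package looks the same, except that `debuglog` is required directly (the `'foo'` section name just mirrors the example above):

```javascript
// Minimal sketch: obtain a section-specific logger from the backport.
var debuglog = require('debuglog')('foo');

var bar = 123;
// Written to stderr only when NODE_DEBUG includes "foo"; otherwise a no-op.
debuglog('hello from foo [%d]', bar);
```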
diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/debuglog.js b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/debuglog.js deleted file mode 100644 index da465c294..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/debuglog.js +++ /dev/null @@ -1,22 +0,0 @@ -var util = require('util'); - -module.exports = util.debuglog || debuglog; - -var debugs = {}; -var debugEnviron = process.env.NODE_DEBUG || ''; - -function debuglog(set) { - set = set.toUpperCase(); - if (!debugs[set]) { - if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { - var pid = process.pid; - debugs[set] = function() { - var msg = util.format.apply(exports, arguments); - console.error('%s %d: %s', set, pid, msg); - }; - } else { - debugs[set] = function() {}; - } - } - return debugs[set]; -}; diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/package.json b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/package.json deleted file mode 100644 index b54332bec..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/node_modules/debuglog/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "debuglog", - "version": "0.0.2", - "description": "backport of util.debuglog from node v0.11", - "license": "MIT", - "main": "debuglog.js", - "repository": { - "type": "git", - "url": "https://github.com/sam-github/node-debuglog.git" - }, - "author": { - "name": "Sam Roberts", - "email": "sam@strongloop.com" - }, - "engines": { - "node": "*" - }, - "readme": "# debuglog - backport of util.debuglog() from node v0.11\n\nTo facilitate using the `util.debuglog()` function that will be available when\nnode v0.12 is released now, this is a copy extracted from the source.\n\n## require('debuglog')\n\nReturn `util.debuglog`, if it exists, otherwise it will return an internal copy\nof the implementation from node v0.11.\n\n## debuglog(section)\n\n* `section` {String} The section of the program to be debugged\n* Returns: {Function} The logging function\n\nThis is used to create a function which conditionally writes to stderr\nbased on the existence of a `NODE_DEBUG` environment variable. If the\n`section` name appears in that environment variable, then the returned\nfunction will be similar to `console.error()`. If not, then the\nreturned function is a no-op.\n\nFor example:\n\n```javascript\nvar debuglog = util.debuglog('foo');\n\nvar bar = 123;\ndebuglog('hello from foo [%d]', bar);\n```\n\nIf this program is run with `NODE_DEBUG=foo` in the environment, then\nit will output something like:\n\n FOO 3245: hello from foo [123]\n\nwhere `3245` is the process id. If it is not run with that\nenvironment variable set, then it will not print anything.\n\nYou may separate multiple `NODE_DEBUG` environment variables with a\ncomma. 
For example, `NODE_DEBUG=fs,net,tls`.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/sam-github/node-debuglog/issues" - }, - "homepage": "https://github.com/sam-github/node-debuglog", - "_id": "debuglog@0.0.2", - "dist": { - "shasum": "6b3ca12c2f07006f68eb7450aa109ff798434777" - }, - "_from": "debuglog@0.0.2", - "_resolved": "https://registry.npmjs.org/debuglog/-/debuglog-0.0.2.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/package.json b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/package.json deleted file mode 100644 index 49566cdbd..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "readable-stream", - "version": "1.1.10", - "description": "An exploration of a new kind of readable streams for Node.js", - "main": "readable.js", - "dependencies": { - "core-util-is": "~1.0.0", - "debuglog": "0.0.2" - }, - "devDependencies": { - "tap": "~0.2.6" - }, - "scripts": { - "test": "tap test/simple/*.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/isaacs/readable-stream" - }, - "keywords": [ - "readable", - "stream", - "pipe" - ], - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "license": "MIT", - "optionalDependencies": { - "debuglog": "0.0.2" - }, - "readme": "# readable-stream\n\nA new class of streams for Node.js\n\nThis module provides the new Stream base classes introduced in Node\nv0.10, for use in Node v0.8. You can use it to have programs that\nhave to work with node v0.8, while being forward-compatible for v0.10\nand beyond. When you drop support for v0.8, you can remove this\nmodule, and only use the native streams.\n\nThis is almost exactly the same codebase as appears in Node v0.10.\nHowever:\n\n1. The exported object is actually the Readable class. Decorating the\n native `stream` module would be global pollution.\n2. In v0.10, you can safely use `base64` as an argument to\n `setEncoding` in Readable streams. However, in v0.8, the\n StringDecoder class has no `end()` method, which is problematic for\n Base64. So, don't use that, because it'll break and be weird.\n\nOther than that, the API is the same as `require('stream')` in v0.10,\nso the API docs are reproduced below.\n\n----------\n\n Stability: 2 - Unstable\n\nA stream is an abstract interface implemented by various objects in\nNode. For example a request to an HTTP server is a stream, as is\nstdout. Streams are readable, writable, or both. All streams are\ninstances of [EventEmitter][]\n\nYou can load the Stream base classes by doing `require('stream')`.\nThere are base classes provided for Readable streams, Writable\nstreams, Duplex streams, and Transform streams.\n\n## Compatibility\n\nIn earlier versions of Node, the Readable stream interface was\nsimpler, but also less powerful and less useful.\n\n* Rather than waiting for you to call the `read()` method, `'data'`\n events would start emitting immediately. If you needed to do some\n I/O to decide how to handle data, then you had to store the chunks\n in some kind of buffer so that they would not be lost.\n* The `pause()` method was advisory, rather than guaranteed. 
This\n meant that you still had to be prepared to receive `'data'` events\n even when the stream was in a paused state.\n\nIn Node v0.10, the Readable class described below was added. For\nbackwards compatibility with older Node programs, Readable streams\nswitch into \"old mode\" when a `'data'` event handler is added, or when\nthe `pause()` or `resume()` methods are called. The effect is that,\neven if you are not using the new `read()` method and `'readable'`\nevent, you no longer have to worry about losing `'data'` chunks.\n\nMost programs will continue to function normally. However, this\nintroduces an edge case in the following conditions:\n\n* No `'data'` event handler is added.\n* The `pause()` and `resume()` methods are never called.\n\nFor example, consider the following code:\n\n```javascript\n// WARNING! BROKEN!\nnet.createServer(function(socket) {\n\n // we add an 'end' method, but never consume the data\n socket.on('end', function() {\n // It will never get here.\n socket.end('I got your message (but didnt read it)\\n');\n });\n\n}).listen(1337);\n```\n\nIn versions of node prior to v0.10, the incoming message data would be\nsimply discarded. However, in Node v0.10 and beyond, the socket will\nremain paused forever.\n\nThe workaround in this situation is to call the `resume()` method to\ntrigger \"old mode\" behavior:\n\n```javascript\n// Workaround\nnet.createServer(function(socket) {\n\n socket.on('end', function() {\n socket.end('I got your message (but didnt read it)\\n');\n });\n\n // start the flow of data, discarding it.\n socket.resume();\n\n}).listen(1337);\n```\n\nIn addition to new Readable streams switching into old-mode, pre-v0.10\nstyle streams can be wrapped in a Readable class using the `wrap()`\nmethod.\n\n## Class: stream.Readable\n\n\n\nA `Readable Stream` has the following methods, members, and events.\n\nNote that `stream.Readable` is an abstract class designed to be\nextended with an underlying implementation of the `_read(size)`\nmethod. (See below.)\n\n### new stream.Readable([options])\n\n* `options` {Object}\n * `highWaterMark` {Number} The maximum number of bytes to store in\n the internal buffer before ceasing to read from the underlying\n resource. Default=16kb\n * `encoding` {String} If specified, then buffers will be decoded to\n strings using the specified encoding. Default=null\n * `objectMode` {Boolean} Whether this stream should behave\n as a stream of objects. Meaning that stream.read(n) returns\n a single value instead of a Buffer of size n\n\nIn classes that extend the Readable class, make sure to call the\nconstructor so that the buffering settings can be properly\ninitialized.\n\n### readable.\\_read(size)\n\n* `size` {Number} Number of bytes to read asynchronously\n\nNote: **This function should NOT be called directly.** It should be\nimplemented by child classes, and called by the internal Readable\nclass methods only.\n\nAll Readable stream implementations must provide a `_read` method\nto fetch data from the underlying resource.\n\nThis method is prefixed with an underscore because it is internal to\nthe class that defines it, and should not be called directly by user\nprograms. However, you **are** expected to override this method in\nyour own extension classes.\n\nWhen data is available, put it into the read queue by calling\n`readable.push(chunk)`. If `push` returns false, then you should stop\nreading. When `_read` is called again, you should start pushing more\ndata.\n\nThe `size` argument is advisory. 
Implementations where a \"read\" is a\nsingle call that returns data can use this to know how much data to\nfetch. Implementations where that is not relevant, such as TCP or\nTLS, may ignore this argument, and simply provide data whenever it\nbecomes available. There is no need, for example to \"wait\" until\n`size` bytes are available before calling `stream.push(chunk)`.\n\n### readable.push(chunk)\n\n* `chunk` {Buffer | null | String} Chunk of data to push into the read queue\n* return {Boolean} Whether or not more pushes should be performed\n\nNote: **This function should be called by Readable implementors, NOT\nby consumers of Readable subclasses.** The `_read()` function will not\nbe called again until at least one `push(chunk)` call is made. If no\ndata is available, then you MAY call `push('')` (an empty string) to\nallow a future `_read` call, without adding any data to the queue.\n\nThe `Readable` class works by putting data into a read queue to be\npulled out later by calling the `read()` method when the `'readable'`\nevent fires.\n\nThe `push()` method will explicitly insert some data into the read\nqueue. If it is called with `null` then it will signal the end of the\ndata.\n\nIn some cases, you may be wrapping a lower-level source which has some\nsort of pause/resume mechanism, and a data callback. In those cases,\nyou could wrap the low-level source object by doing something like\nthis:\n\n```javascript\n// source is an object with readStop() and readStart() methods,\n// and an `ondata` member that gets called when it has data, and\n// an `onend` member that gets called when the data is over.\n\nvar stream = new Readable();\n\nsource.ondata = function(chunk) {\n // if push() returns false, then we need to stop reading from source\n if (!stream.push(chunk))\n source.readStop();\n};\n\nsource.onend = function() {\n stream.push(null);\n};\n\n// _read will be called when the stream wants to pull more data in\n// the advisory size argument is ignored in this case.\nstream._read = function(n) {\n source.readStart();\n};\n```\n\n### readable.unshift(chunk)\n\n* `chunk` {Buffer | null | String} Chunk of data to unshift onto the read queue\n* return {Boolean} Whether or not more pushes should be performed\n\nThis is the corollary of `readable.push(chunk)`. Rather than putting\nthe data at the *end* of the read queue, it puts it at the *front* of\nthe read queue.\n\nThis is useful in certain use-cases where a stream is being consumed\nby a parser, which needs to \"un-consume\" some data that it has\noptimistically pulled out of the source.\n\n```javascript\n// A parser for a simple data protocol.\n// The \"header\" is a JSON object, followed by 2 \\n characters, and\n// then a message body.\n//\n// Note: This can be done more simply as a Transform stream. 
See below.\n\nfunction SimpleProtocol(source, options) {\n if (!(this instanceof SimpleProtocol))\n return new SimpleProtocol(options);\n\n Readable.call(this, options);\n this._inBody = false;\n this._sawFirstCr = false;\n\n // source is a readable stream, such as a socket or file\n this._source = source;\n\n var self = this;\n source.on('end', function() {\n self.push(null);\n });\n\n // give it a kick whenever the source is readable\n // read(0) will not consume any bytes\n source.on('readable', function() {\n self.read(0);\n });\n\n this._rawHeader = [];\n this.header = null;\n}\n\nSimpleProtocol.prototype = Object.create(\n Readable.prototype, { constructor: { value: SimpleProtocol }});\n\nSimpleProtocol.prototype._read = function(n) {\n if (!this._inBody) {\n var chunk = this._source.read();\n\n // if the source doesn't have data, we don't have data yet.\n if (chunk === null)\n return this.push('');\n\n // check if the chunk has a \\n\\n\n var split = -1;\n for (var i = 0; i < chunk.length; i++) {\n if (chunk[i] === 10) { // '\\n'\n if (this._sawFirstCr) {\n split = i;\n break;\n } else {\n this._sawFirstCr = true;\n }\n } else {\n this._sawFirstCr = false;\n }\n }\n\n if (split === -1) {\n // still waiting for the \\n\\n\n // stash the chunk, and try again.\n this._rawHeader.push(chunk);\n this.push('');\n } else {\n this._inBody = true;\n var h = chunk.slice(0, split);\n this._rawHeader.push(h);\n var header = Buffer.concat(this._rawHeader).toString();\n try {\n this.header = JSON.parse(header);\n } catch (er) {\n this.emit('error', new Error('invalid simple protocol data'));\n return;\n }\n // now, because we got some extra data, unshift the rest\n // back into the read queue so that our consumer will see it.\n var b = chunk.slice(split);\n this.unshift(b);\n\n // and let them know that we are done parsing the header.\n this.emit('header', this.header);\n }\n } else {\n // from there on, just provide the data to our consumer.\n // careful not to push(null), since that would indicate EOF.\n var chunk = this._source.read();\n if (chunk) this.push(chunk);\n }\n};\n\n// Usage:\nvar parser = new SimpleProtocol(source);\n// Now parser is a readable stream that will emit 'header'\n// with the parsed header data.\n```\n\n### readable.wrap(stream)\n\n* `stream` {Stream} An \"old style\" readable stream\n\nIf you are using an older Node library that emits `'data'` events and\nhas a `pause()` method that is advisory only, then you can use the\n`wrap()` method to create a Readable stream that uses the old stream\nas its data source.\n\nFor example:\n\n```javascript\nvar OldReader = require('./old-api-module.js').OldReader;\nvar oreader = new OldReader;\nvar Readable = require('stream').Readable;\nvar myReader = new Readable().wrap(oreader);\n\nmyReader.on('readable', function() {\n myReader.read(); // etc.\n});\n```\n\n### Event: 'readable'\n\nWhen there is data ready to be consumed, this event will fire.\n\nWhen this event emits, call the `read()` method to consume the data.\n\n### Event: 'end'\n\nEmitted when the stream has received an EOF (FIN in TCP terminology).\nIndicates that no more `'data'` events will happen. 
If the stream is\nalso writable, it may be possible to continue writing.\n\n### Event: 'data'\n\nThe `'data'` event emits either a `Buffer` (by default) or a string if\n`setEncoding()` was used.\n\nNote that adding a `'data'` event listener will switch the Readable\nstream into \"old mode\", where data is emitted as soon as it is\navailable, rather than waiting for you to call `read()` to consume it.\n\n### Event: 'error'\n\nEmitted if there was an error receiving data.\n\n### Event: 'close'\n\nEmitted when the underlying resource (for example, the backing file\ndescriptor) has been closed. Not all streams will emit this.\n\n### readable.setEncoding(encoding)\n\nMakes the `'data'` event emit a string instead of a `Buffer`. `encoding`\ncan be `'utf8'`, `'utf16le'` (`'ucs2'`), `'ascii'`, or `'hex'`.\n\nThe encoding can also be set by specifying an `encoding` field to the\nconstructor.\n\n### readable.read([size])\n\n* `size` {Number | null} Optional number of bytes to read.\n* Return: {Buffer | String | null}\n\nNote: **This function SHOULD be called by Readable stream users.**\n\nCall this method to consume data once the `'readable'` event is\nemitted.\n\nThe `size` argument will set a minimum number of bytes that you are\ninterested in. If not set, then the entire content of the internal\nbuffer is returned.\n\nIf there is no data to consume, or if there are fewer bytes in the\ninternal buffer than the `size` argument, then `null` is returned, and\na future `'readable'` event will be emitted when more is available.\n\nCalling `stream.read(0)` will always return `null`, and will trigger a\nrefresh of the internal buffer, but otherwise be a no-op.\n\n### readable.pipe(destination, [options])\n\n* `destination` {Writable Stream}\n* `options` {Object} Optional\n * `end` {Boolean} Default=true\n\nConnects this readable stream to `destination` WriteStream. Incoming\ndata on this stream gets written to `destination`. Properly manages\nback-pressure so that a slow destination will not be overwhelmed by a\nfast readable stream.\n\nThis function returns the `destination` stream.\n\nFor example, emulating the Unix `cat` command:\n\n process.stdin.pipe(process.stdout);\n\nBy default `end()` is called on the destination when the source stream\nemits `end`, so that `destination` is no longer writable. Pass `{ end:\nfalse }` as `options` to keep the destination stream open.\n\nThis keeps `writer` open so that \"Goodbye\" can be written at the\nend.\n\n reader.pipe(writer, { end: false });\n reader.on(\"end\", function() {\n writer.end(\"Goodbye\\n\");\n });\n\nNote that `process.stderr` and `process.stdout` are never closed until\nthe process exits, regardless of the specified options.\n\n### readable.unpipe([destination])\n\n* `destination` {Writable Stream} Optional\n\nUndo a previously established `pipe()`. If no destination is\nprovided, then all previously established pipes are removed.\n\n### readable.pause()\n\nSwitches the readable stream into \"old mode\", where data is emitted\nusing a `'data'` event rather than being buffered for consumption via\nthe `read()` method.\n\nCeases the flow of data. 
No `'data'` events are emitted while the\nstream is in a paused state.\n\n### readable.resume()\n\nSwitches the readable stream into \"old mode\", where data is emitted\nusing a `'data'` event rather than being buffered for consumption via\nthe `read()` method.\n\nResumes the incoming `'data'` events after a `pause()`.\n\n\n## Class: stream.Writable\n\n\n\nA `Writable` Stream has the following methods, members, and events.\n\nNote that `stream.Writable` is an abstract class designed to be\nextended with an underlying implementation of the\n`_write(chunk, encoding, cb)` method. (See below.)\n\n### new stream.Writable([options])\n\n* `options` {Object}\n * `highWaterMark` {Number} Buffer level when `write()` starts\n returning false. Default=16kb\n * `decodeStrings` {Boolean} Whether or not to decode strings into\n Buffers before passing them to `_write()`. Default=true\n\nIn classes that extend the Writable class, make sure to call the\nconstructor so that the buffering settings can be properly\ninitialized.\n\n### writable.\\_write(chunk, encoding, callback)\n\n* `chunk` {Buffer | String} The chunk to be written. Will always\n be a buffer unless the `decodeStrings` option was set to `false`.\n* `encoding` {String} If the chunk is a string, then this is the\n encoding type. Ignore chunk is a buffer. Note that chunk will\n **always** be a buffer unless the `decodeStrings` option is\n explicitly set to `false`.\n* `callback` {Function} Call this function (optionally with an error\n argument) when you are done processing the supplied chunk.\n\nAll Writable stream implementations must provide a `_write` method to\nsend data to the underlying resource.\n\nNote: **This function MUST NOT be called directly.** It should be\nimplemented by child classes, and called by the internal Writable\nclass methods only.\n\nCall the callback using the standard `callback(error)` pattern to\nsignal that the write completed successfully or with an error.\n\nIf the `decodeStrings` flag is set in the constructor options, then\n`chunk` may be a string rather than a Buffer, and `encoding` will\nindicate the sort of string that it is. This is to support\nimplementations that have an optimized handling for certain string\ndata encodings. If you do not explicitly set the `decodeStrings`\noption to `false`, then you can safely ignore the `encoding` argument,\nand assume that `chunk` will always be a Buffer.\n\nThis method is prefixed with an underscore because it is internal to\nthe class that defines it, and should not be called directly by user\nprograms. However, you **are** expected to override this method in\nyour own extension classes.\n\n\n### writable.write(chunk, [encoding], [callback])\n\n* `chunk` {Buffer | String} Data to be written\n* `encoding` {String} Optional. If `chunk` is a string, then encoding\n defaults to `'utf8'`\n* `callback` {Function} Optional. Called when this chunk is\n successfully written.\n* Returns {Boolean}\n\nWrites `chunk` to the stream. Returns `true` if the data has been\nflushed to the underlying resource. Returns `false` to indicate that\nthe buffer is full, and the data will be sent out in the future. The\n`'drain'` event will indicate when the buffer is empty again.\n\nThe specifics of when `write()` will return false, is determined by\nthe `highWaterMark` option provided to the constructor.\n\n### writable.end([chunk], [encoding], [callback])\n\n* `chunk` {Buffer | String} Optional final data to be written\n* `encoding` {String} Optional. 
If `chunk` is a string, then encoding\n defaults to `'utf8'`\n* `callback` {Function} Optional. Called when the final chunk is\n successfully written.\n\nCall this method to signal the end of the data being written to the\nstream.\n\n### Event: 'drain'\n\nEmitted when the stream's write queue empties and it's safe to write\nwithout buffering again. Listen for it when `stream.write()` returns\n`false`.\n\n### Event: 'close'\n\nEmitted when the underlying resource (for example, the backing file\ndescriptor) has been closed. Not all streams will emit this.\n\n### Event: 'finish'\n\nWhen `end()` is called and there are no more chunks to write, this\nevent is emitted.\n\n### Event: 'pipe'\n\n* `source` {Readable Stream}\n\nEmitted when the stream is passed to a readable stream's pipe method.\n\n### Event 'unpipe'\n\n* `source` {Readable Stream}\n\nEmitted when a previously established `pipe()` is removed using the\nsource Readable stream's `unpipe()` method.\n\n## Class: stream.Duplex\n\n\n\nA \"duplex\" stream is one that is both Readable and Writable, such as a\nTCP socket connection.\n\nNote that `stream.Duplex` is an abstract class designed to be\nextended with an underlying implementation of the `_read(size)`\nand `_write(chunk, encoding, callback)` methods as you would with a Readable or\nWritable stream class.\n\nSince JavaScript doesn't have multiple prototypal inheritance, this\nclass prototypally inherits from Readable, and then parasitically from\nWritable. It is thus up to the user to implement both the lowlevel\n`_read(n)` method as well as the lowlevel `_write(chunk, encoding, cb)` method\non extension duplex classes.\n\n### new stream.Duplex(options)\n\n* `options` {Object} Passed to both Writable and Readable\n constructors. Also has the following fields:\n * `allowHalfOpen` {Boolean} Default=true. If set to `false`, then\n the stream will automatically end the readable side when the\n writable side ends and vice versa.\n\nIn classes that extend the Duplex class, make sure to call the\nconstructor so that the buffering settings can be properly\ninitialized.\n\n## Class: stream.Transform\n\nA \"transform\" stream is a duplex stream where the output is causally\nconnected in some way to the input, such as a zlib stream or a crypto\nstream.\n\nThere is no requirement that the output be the same size as the input,\nthe same number of chunks, or arrive at the same time. For example, a\nHash stream will only ever have a single chunk of output which is\nprovided when the input is ended. A zlib stream will either produce\nmuch smaller or much larger than its input.\n\nRather than implement the `_read()` and `_write()` methods, Transform\nclasses must implement the `_transform()` method, and may optionally\nalso implement the `_flush()` method. (See below.)\n\n### new stream.Transform([options])\n\n* `options` {Object} Passed to both Writable and Readable\n constructors.\n\nIn classes that extend the Transform class, make sure to call the\nconstructor so that the buffering settings can be properly\ninitialized.\n\n### transform.\\_transform(chunk, encoding, callback)\n\n* `chunk` {Buffer | String} The chunk to be transformed. Will always\n be a buffer unless the `decodeStrings` option was set to `false`.\n* `encoding` {String} If the chunk is a string, then this is the\n encoding type. 
(Ignore if `decodeStrings` chunk is a buffer.)\n* `callback` {Function} Call this function (optionally with an error\n argument) when you are done processing the supplied chunk.\n\nNote: **This function MUST NOT be called directly.** It should be\nimplemented by child classes, and called by the internal Transform\nclass methods only.\n\nAll Transform stream implementations must provide a `_transform`\nmethod to accept input and produce output.\n\n`_transform` should do whatever has to be done in this specific\nTransform class, to handle the bytes being written, and pass them off\nto the readable portion of the interface. Do asynchronous I/O,\nprocess things, and so on.\n\nCall `transform.push(outputChunk)` 0 or more times to generate output\nfrom this input chunk, depending on how much data you want to output\nas a result of this chunk.\n\nCall the callback function only when the current chunk is completely\nconsumed. Note that there may or may not be output as a result of any\nparticular input chunk.\n\nThis method is prefixed with an underscore because it is internal to\nthe class that defines it, and should not be called directly by user\nprograms. However, you **are** expected to override this method in\nyour own extension classes.\n\n### transform.\\_flush(callback)\n\n* `callback` {Function} Call this function (optionally with an error\n argument) when you are done flushing any remaining data.\n\nNote: **This function MUST NOT be called directly.** It MAY be implemented\nby child classes, and if so, will be called by the internal Transform\nclass methods only.\n\nIn some cases, your transform operation may need to emit a bit more\ndata at the end of the stream. For example, a `Zlib` compression\nstream will store up some internal state so that it can optimally\ncompress the output. At the end, however, it needs to do the best it\ncan with what is left, so that the data will be complete.\n\nIn those cases, you can implement a `_flush` method, which will be\ncalled at the very end, after all the written data is consumed, but\nbefore emitting `end` to signal the end of the readable side. Just\nlike with `_transform`, call `transform.push(chunk)` zero or more\ntimes, as appropriate, and call `callback` when the flush operation is\ncomplete.\n\nThis method is prefixed with an underscore because it is internal to\nthe class that defines it, and should not be called directly by user\nprograms. 
However, you **are** expected to override this method in\nyour own extension classes.\n\n### Example: `SimpleProtocol` parser\n\nThe example above of a simple protocol parser can be implemented much\nmore simply by using the higher level `Transform` stream class.\n\nIn this example, rather than providing the input as an argument, it\nwould be piped into the parser, which is a more idiomatic Node stream\napproach.\n\n```javascript\nfunction SimpleProtocol(options) {\n if (!(this instanceof SimpleProtocol))\n return new SimpleProtocol(options);\n\n Transform.call(this, options);\n this._inBody = false;\n this._sawFirstCr = false;\n this._rawHeader = [];\n this.header = null;\n}\n\nSimpleProtocol.prototype = Object.create(\n Transform.prototype, { constructor: { value: SimpleProtocol }});\n\nSimpleProtocol.prototype._transform = function(chunk, encoding, done) {\n if (!this._inBody) {\n // check if the chunk has a \\n\\n\n var split = -1;\n for (var i = 0; i < chunk.length; i++) {\n if (chunk[i] === 10) { // '\\n'\n if (this._sawFirstCr) {\n split = i;\n break;\n } else {\n this._sawFirstCr = true;\n }\n } else {\n this._sawFirstCr = false;\n }\n }\n\n if (split === -1) {\n // still waiting for the \\n\\n\n // stash the chunk, and try again.\n this._rawHeader.push(chunk);\n } else {\n this._inBody = true;\n var h = chunk.slice(0, split);\n this._rawHeader.push(h);\n var header = Buffer.concat(this._rawHeader).toString();\n try {\n this.header = JSON.parse(header);\n } catch (er) {\n this.emit('error', new Error('invalid simple protocol data'));\n return;\n }\n // and let them know that we are done parsing the header.\n this.emit('header', this.header);\n\n // now, because we got some extra data, emit this first.\n this.push(b);\n }\n } else {\n // from there on, just provide the data to our consumer as-is.\n this.push(b);\n }\n done();\n};\n\nvar parser = new SimpleProtocol();\nsource.pipe(parser)\n\n// Now parser is a readable stream that will emit 'header'\n// with the parsed header data.\n```\n\n\n## Class: stream.PassThrough\n\nThis is a trivial implementation of a `Transform` stream that simply\npasses the input bytes across to the output. 
Its purpose is mainly\nfor examples and testing, but there are occasionally use cases where\nit can come in handy.\n\n\n[EventEmitter]: events.html#events_class_events_eventemitter\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/readable-stream/issues" - }, - "homepage": "https://github.com/isaacs/readable-stream", - "_id": "readable-stream@1.1.10", - "_from": "readable-stream@~1.1.9" -} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/package.json b/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/package.json deleted file mode 100644 index 1453933a1..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "stream-counter", - "version": "0.2.0", - "description": "keeps track of how many bytes have been written to a stream", - "main": "index.js", - "scripts": { - "test": "node test/test.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/superjoe30/node-stream-counter.git" - }, - "author": { - "name": "Andrew Kelley", - "email": "superjoe30@gmail.com" - }, - "license": "BSD", - "engines": { - "node": ">=0.8.0" - }, - "dependencies": { - "readable-stream": "~1.1.8" - }, - "readme": "# stream-counter\n\nKeep track of how many bytes have been written to a stream.\n\n## Usage\n\n```js\nvar StreamCounter = require('stream-counter');\nvar counter = new StreamCounter();\ncounter.on('progress', function() {\n console.log(\"progress\", counter.bytes);\n});\nfs.createReadStream('foo.txt').pipe(counter);\n```\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/superjoe30/node-stream-counter/issues" - }, - "homepage": "https://github.com/superjoe30/node-stream-counter", - "_id": "stream-counter@0.2.0", - "_from": "stream-counter@~0.2.0" -} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/package.json b/node_modules/express/node_modules/connect/node_modules/multiparty/package.json deleted file mode 100644 index 865cb96ff..000000000 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "multiparty", - "version": "2.2.0", - "description": "multipart/form-data parser which supports streaming", - "repository": { - "type": "git", - "url": "git@github.com:superjoe30/node-multiparty.git" - }, - "keywords": [ - "file", - "upload", - "formidable", - "stream", - "s3" - ], - "devDependencies": { - "findit": "0.1.1", - "hashish": "0.0.4", - "mocha": "~1.8.2", - "request": "~2.16.6", - "mkdirp": "~0.3.5", - "superagent": "~0.14.1" - }, - "scripts": { - "test": "ulimit -n 500 && mocha --timeout 4000 --reporter spec --recursive test/test.js" - }, - "engines": { - "node": ">=0.8.0" - }, - "license": "MIT", - "dependencies": { - "readable-stream": "~1.1.9", - "stream-counter": "~0.2.0" - }, - "readme": "[![Build Status](https://travis-ci.org/superjoe30/node-multiparty.png?branch=master)](https://travis-ci.org/superjoe30/node-multiparty)\n# multiparty\n\nParse http requests with content-type `multipart/form-data`, also known as file uploads.\n\nSee also [busboy](https://github.com/mscdex/busboy) - a\n[faster](https://github.com/mscdex/dicer/wiki/Benchmarks) alternative\nwhich may be worth looking into.\n\n### Why the fork?\n\n * This module uses the Node.js v0.10 streams properly, *even in Node.js v0.8*\n * It will not create 
a temp file for you unless you want it to.\n * Counts bytes and does math to help you figure out the `Content-Length` of\n each part.\n * You can easily stream uploads to s3 with\n [knox](https://github.com/LearnBoost/knox), for [example](examples/s3.js).\n * Less bugs. This code is simpler, has all deprecated functionality removed,\n has cleaner tests, and does not try to do anything beyond multipart stream\n parsing.\n\n## Installation\n\n```\nnpm install multiparty\n```\n\n## Usage\n\n * See [examples](examples).\n\nParse an incoming `multipart/form-data` request.\n\n```js\nvar multiparty = require('multiparty')\n , http = require('http')\n , util = require('util')\n\nhttp.createServer(function(req, res) {\n if (req.url === '/upload' && req.method === 'POST') {\n // parse a file upload\n var form = new multiparty.Form();\n\n form.parse(req, function(err, fields, files) {\n res.writeHead(200, {'content-type': 'text/plain'});\n res.write('received upload:\\n\\n');\n res.end(util.inspect({fields: fields, files: files}));\n });\n\n return;\n }\n\n // show a file upload form\n res.writeHead(200, {'content-type': 'text/html'});\n res.end(\n '
<form action=\"/upload\" enctype=\"multipart/form-data\" method=\"post\">'+\n    '<input type=\"text\" name=\"title\"><br>'+\n    '<input type=\"file\" name=\"upload\" multiple=\"multiple\"><br>'+\n    '<input type=\"submit\" value=\"Upload\">'+\n    '</form>
'\n );\n}).listen(8080);\n```\n\n## API\n\n### multiparty.Form\n```js\nvar form = new multiparty.Form(options)\n```\nCreates a new form. Options:\n\n * `encoding` - sets encoding for the incoming form fields. Defaults to `utf8`.\n * `maxFieldSize` - Limits the amount of memory a field (not a file) can\n allocate in bytes. If this value is exceeded, an `error` event is emitted.\n The default size is 2MB.\n * `maxFields` - Limits the number of fields that will be parsed before\n emitting an `error` event. A file counts as a field in this case.\n Defaults to 1000.\n * `autoFields` - Enables `field` events. This is automatically set to `true`\n if you add a `field` listener.\n * `autoFiles` - Enables `file` events. This is automatically set to `true`\n if you add a `file` listener.\n * `uploadDir` - Only relevant when `autoFiles` is `true`. The directory for\n placing file uploads in. You can move them later using `fs.rename()`.\n Defaults to `os.tmpDir()`.\n * `hash` - Only relevant when `autoFiles` is `true`. If you want checksums\n calculated for incoming files, set this to either `sha1` or `md5`.\n Defaults to off.\n\n#### form.parse(request, [cb])\n\nParses an incoming node.js `request` containing form data. If `cb` is\nprovided, `autoFields` and `autoFiles` are set to `true` and all fields and\nfiles are collected and passed to the callback:\n\n```js\nform.parse(req, function(err, fieldsObject, filesObject, fieldsList, filesList) {\n // ...\n});\n```\n\nIt is often convenient to access a field or file by name. In this situation,\nuse `fieldsObject` or `filesObject`. However sometimes, as in the case of a\n`` the multipart stream will contain\nmultiple files of the same input name, and you are interested in all of them.\nIn this case, use `filesList`.\n\nAnother example is when you do not care what the field name of a file is; you\nare merely interested in a single upload. In this case, set `maxFields` to 1\n(assuming no other fields expected besides the file) and use `filesList[0]`.\n\n#### form.bytesReceived\n\nThe amount of bytes received for this form so far.\n\n#### form.bytesExpected\n\nThe expected number of bytes in this form.\n\n### Events\n\n#### 'error' (err)\n\nYou definitely want to handle this event. If not your server *will* crash when\nusers submit bogus multipart requests!\n\n#### 'part' (part)\n\nEmitted when a part is encountered in the request. `part` is a\n`ReadableStream`. It also has the following properties:\n\n * `headers` - the headers for this part. For example, you may be interested\n in `content-type`.\n * `name` - the field name for this part\n * `filename` - only if the part is an incoming file\n * `byteOffset` - the byte offset of this part in the request body\n * `byteCount` - assuming that this is the last part in the request,\n this is the size of this part in bytes. You could use this, for\n example, to set the `Content-Length` header if uploading to S3.\n If the part had a `Content-Length` header then that value is used\n here instead.\n\n#### 'aborted'\n\nEmitted when the request is aborted. This event will be followed shortly\nby an `error` event. In practice you do not need to handle this event.\n\n#### 'progress' (bytesReceived, bytesExpected)\n\n#### 'close'\n\nEmitted after all parts have been parsed and emitted. Not emitted if an `error`\nevent is emitted. 
This is typically when you would send your response.\n\n#### 'file' (name, file)\n\n**By default multiparty will not touch your hard drive.** But if you add this\nlistener, multiparty automatically sets `form.autoFiles` to `true` and will\nstream uploads to disk for you. \n\n * `name` - the field name for this file\n * `file` - an object with these properties:\n - `fieldName` - same as `name` - the field name for this file\n - `originalFilename` - the filename that the user reports for the file\n - `path` - the absolute path of the uploaded file on disk\n - `headers` - the HTTP headers that were sent along with this file\n - `size` - size of the file in bytes\n\nIf you set the `form.hash` option, then `file` will also contain a `hash`\nproperty which is the checksum of the file.\n\n#### 'field' (name, value)\n\n * `name` - field name\n * `value` - string field value\n\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/superjoe30/node-multiparty/issues" - }, - "homepage": "https://github.com/superjoe30/node-multiparty", - "_id": "multiparty@2.2.0", - "dist": { - "shasum": "671d6e5fb688c8168f4a0e501580acca1fc2019c" - }, - "_from": "multiparty@2.2.0", - "_resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/package.json b/node_modules/express/node_modules/connect/node_modules/negotiator/package.json deleted file mode 100644 index 18fc25e2b..000000000 --- a/node_modules/express/node_modules/connect/node_modules/negotiator/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "negotiator", - "description": "HTTP content negotiation", - "version": "0.3.0", - "author": { - "name": "Federico Romero", - "email": "federico.romero@outboxlabs.com" - }, - "contributors": [ - { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - } - ], - "repository": { - "type": "git", - "url": "git://github.com/federomero/negotiator.git" - }, - "keywords": [ - "http", - "content negotiation", - "accept", - "accept-language", - "accept-encoding", - "accept-charset" - ], - "engine": "node >= 0.6", - "license": "MIT", - "devDependencies": { - "nodeunit": "0.6.x" - }, - "scripts": { - "test": "nodeunit test" - }, - "optionalDependencies": {}, - "engines": { - "node": "*" - }, - "main": "lib/negotiator.js", - "readme": "# Negotiator\n\nAn HTTP content negotiator for node.js written in javascript.\n\n# Accept Negotiation\n\n Negotiator = require('negotiator')\n\n availableMediaTypes = ['text/html', 'text/plain', 'application/json']\n\n // The negotiator constructor receives a request object\n negotiator = new Negotiator(request)\n\n // Let's say Accept header is 'text/html, application/*;q=0.2, image/jpeg;q=0.8'\n\n negotiator.preferredMediaTypes()\n // -> ['text/html', 'image/jpeg', 'application/*']\n\n negotiator.preferredMediaTypes(availableMediaTypes)\n // -> ['text/html', 'application/json']\n\n negotiator.preferredMediaType(availableMediaTypes)\n // -> 'text/html'\n\nYou can check a working example at `examples/accept.js`.\n\n## Methods\n\n`preferredMediaTypes(availableMediaTypes)`:\n\nReturns an array of preferred media types ordered by priority from a list of available media types.\n\n`preferredMediaType(availableMediaType)`:\n\nReturns the top preferred media type from a list of available media types.\n\n# Accept-Language Negotiation\n\n Negotiator = require('negotiator')\n\n negotiator = new Negotiator(request)\n\n availableLanguages = 'en', 'es', 'fr'\n\n // Let's say Accept-Language header is 'en;q=0.8, es, pt'\n\n negotiator.preferredLanguages()\n // -> ['es', 'pt', 'en']\n\n negotiator.preferredLanguages(availableLanguages)\n // -> ['es', 'en']\n\n language = negotiator.preferredLanguage(availableLanguages)\n // -> 'es'\n\nYou can check a working example at `examples/language.js`.\n\n## Methods\n\n`preferredLanguages(availableLanguages)`:\n\nReturns an array of preferred languages ordered by priority from a list of available languages.\n\n`preferredLanguage(availableLanguages)`:\n\nReturns the top preferred language from a list of available languages.\n\n# Accept-Charset Negotiation\n\n Negotiator = require('negotiator')\n\n availableCharsets = ['utf-8', 'iso-8859-1', 'iso-8859-5']\n\n negotiator = new Negotiator(request)\n\n // Let's say Accept-Charset header is 'utf-8, iso-8859-1;q=0.8, utf-7;q=0.2'\n\n negotiator.preferredCharsets()\n // -> ['utf-8', 'iso-8859-1', 'utf-7']\n\n negotiator.preferredCharsets(availableCharsets)\n // -> ['utf-8', 'iso-8859-1']\n\n negotiator.preferredCharset(availableCharsets)\n // -> 'utf-8'\n\nYou can check a working example at `examples/charset.js`.\n\n## Methods\n\n`preferredCharsets(availableCharsets)`:\n\nReturns an array of preferred charsets ordered by priority from a list of available charsets.\n\n`preferredCharset(availableCharsets)`:\n\nReturns the top preferred charset from a list of available charsets.\n\n# Accept-Encoding Negotiation\n\n Negotiator = require('negotiator').Negotiator\n\n availableEncodings = ['identity', 'gzip']\n\n negotiator = new Negotiator(request)\n\n // Let's say Accept-Encoding header is 'gzip, compress;q=0.2, identity;q=0.5'\n\n negotiator.preferredEncodings()\n // -> ['gzip', 'identity', 'compress']\n\n negotiator.preferredEncodings(availableEncodings)\n // -> ['gzip', 'identity']\n\n 
negotiator.preferredEncoding(availableEncodings)\n // -> 'gzip'\n\nYou can check a working example at `examples/encoding.js`.\n\n## Methods\n\n`preferredEncodings(availableEncodings)`:\n\nReturns an array of preferred encodings ordered by priority from a list of available encodings.\n\n`preferredEncoding(availableEncodings)`:\n\nReturns the top preferred encoding from a list of available encodings.\n\n# License\n\nMIT\n", - "readmeFilename": "readme.md", - "bugs": { - "url": "https://github.com/federomero/negotiator/issues" - }, - "homepage": "https://github.com/federomero/negotiator", - "dependencies": {}, - "_id": "negotiator@0.3.0", - "dist": { - "shasum": "76b4d7f976102e47fe0cc501ddab061f3a122d9f" - }, - "_from": "negotiator@0.3.0", - "_resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/pause/package.json b/node_modules/express/node_modules/connect/node_modules/pause/package.json deleted file mode 100644 index 045682778..000000000 --- a/node_modules/express/node_modules/connect/node_modules/pause/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "pause", - "version": "0.0.1", - "description": "Pause streams...", - "keywords": [], - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca" - }, - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "main": "index", - "readme": "\n# pause\n\n Pause streams...\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - "readmeFilename": "Readme.md", - "_id": "pause@0.0.1", - "dist": { - "shasum": "6a6ba4ff6aed2c7a98f3781d8628af985898425b" - }, - "_from": "pause@0.0.1", - "_resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/qs/package.json b/node_modules/express/node_modules/connect/node_modules/qs/package.json deleted file mode 100644 index ae4f95a5b..000000000 --- a/node_modules/express/node_modules/connect/node_modules/qs/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "qs", - "description": "querystring parser", - "version": "0.6.6", - "keywords": [ - "query string", - "parser", - "component" - ], - "repository": { - "type": "git", - "url": "git://github.com/visionmedia/node-querystring.git" - }, - "devDependencies": { - "mocha": "*", - "expect.js": "*" - }, - "scripts": { - "test": "make test" - }, - "author": { - "name": "TJ Holowaychuk", - 
"email": "tj@vision-media.ca", - "url": "http://tjholowaychuk.com" - }, - "main": "index", - "engines": { - "node": "*" - }, - "readme": "# node-querystring\n\n query string parser for node and the browser supporting nesting, as it was removed from `0.3.x`, so this library provides the previous and commonly desired behaviour (and twice as fast). Used by [express](http://expressjs.com), [connect](http://senchalabs.github.com/connect) and others.\n\n## Installation\n\n $ npm install qs\n\n## Examples\n\n```js\nvar qs = require('qs');\n\nqs.parse('user[name][first]=Tobi&user[email]=tobi@learnboost.com');\n// => { user: { name: { first: 'Tobi' }, email: 'tobi@learnboost.com' } }\n\nqs.stringify({ user: { name: 'Tobi', email: 'tobi@learnboost.com' }})\n// => user[name]=Tobi&user[email]=tobi%40learnboost.com\n```\n\n## Testing\n\nInstall dev dependencies:\n\n $ npm install -d\n\nand execute:\n\n $ make test\n\nbrowser:\n\n $ open test/browser/index.html\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2010 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/node-querystring/issues" - }, - "homepage": "https://github.com/visionmedia/node-querystring", - "_id": "qs@0.6.6", - "dist": { - "shasum": "ea877bcb95c37f906a311d04885cf23c97c13998" - }, - "_from": "qs@0.6.6", - "_resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/raw-body/package.json b/node_modules/express/node_modules/connect/node_modules/raw-body/package.json deleted file mode 100644 index e077c02c6..000000000 --- a/node_modules/express/node_modules/connect/node_modules/raw-body/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "raw-body", - "description": "Get and validate the raw body of a readable stream.", - "version": "1.1.2", - "author": { - "name": "Jonathan Ong", - "email": "me@jongleberry.com", - "url": "http://jongleberry.com" - }, - "license": "MIT", - "repository": { - "type": "git", - "url": "https://github.com/stream-utils/raw-body.git" - }, - "bugs": { - "url": "https://github.com/stream-utils/raw-body/issues" - }, - "dependencies": { - "bytes": "~0.2.1" - }, - "devDependencies": { - "readable-stream": "~1.0.17", - "co": "2", - "gnode": "~0.0.4", - "mocha": "~1.14.0", - "through": "~2.3.4", - "request": "~2.27.0", - "assert-tap": "~0.1.4" - }, - "scripts": { - "test": "NODE=gnode make test && node ./test/acceptance.js" - }, - "engines": { - 
"node": ">= 0.8.0" - }, - "readme": "# Raw Body [![Build Status](https://travis-ci.org/stream-utils/raw-body.png)](https://travis-ci.org/stream-utils/raw-body)\n\nGets the entire buffer of a stream either as a `Buffer` or a string.\nValidates the stream's length against an expected length and maximum limit.\nIdeal for parsing request bodies.\n\n## API\n\n```js\nvar getRawBody = require('raw-body')\n\napp.use(function (req, res, next) {\n getRawBody(req, {\n length: req.headers['content-length'],\n limit: '1mb',\n encoding: 'utf8'\n }, function (err, string) {\n if (err)\n return next(err)\n\n req.text = string\n next()\n })\n})\n```\n\nor in a Koa generator:\n\n```js\napp.use(function* (next) {\n var string = yield getRawBody(this.req, {\n length: this.length,\n limit: '1mb',\n encoding: 'utf8'\n })\n})\n```\n\n### getRawBody(stream, [options], [callback])\n\nReturns a thunk for yielding with generators.\n\nOptions:\n\n- `length` - The length length of the stream.\n If the contents of the stream do not add up to this length,\n an `400` error code is returned.\n- `limit` - The byte limit of the body.\n If the body ends up being larger than this limit,\n a `413` error code is returned.\n- `encoding` - The requested encoding.\n By default, a `Buffer` instance will be returned.\n Most likely, you want `utf8`.\n You can use any type of encoding supported by [StringDecoder](http://nodejs.org/api/string_decoder.html).\n You can also pass `true` which sets it to the default `utf8`\n\n`callback(err, res)`:\n\n- `err` - the following attributes will be defined if applicable:\n\n - `limit` - the limit in bytes\n - `length` and `expected` - the expected length of the stream\n - `received` - the received bytes\n - `status` and `statusCode` - the corresponding status code for the error\n - `type` - either `entity.too.large`, `request.size.invalid`, or `stream.encoding.set`\n\n- `res` - the result, either as a `String` if an encoding was set or a `Buffer` otherwise.\n\nIf an error occurs, the stream will be paused,\nand you are responsible for correctly disposing the stream.\nFor HTTP requests, no handling is required if you send a response.\nFor streams that use file descriptors, you should `stream.destroy()` or `stream.close()` to prevent leaks.\n\n## License\n\nThe MIT License (MIT)\n\nCopyright (c) 2013 Jonathan Ong me@jongleberry.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n", - "readmeFilename": "README.md", - "homepage": "https://github.com/stream-utils/raw-body", - "_id": "raw-body@1.1.2", - "dist": { - "shasum": "55fd7c5da025273ccddb645397e0b0a73c8fb97d" - }, - "_from": "raw-body@1.1.2", - "_resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz" -} diff --git a/node_modules/express/node_modules/connect/node_modules/uid2/package.json b/node_modules/express/node_modules/connect/node_modules/uid2/package.json deleted file mode 100644 index b9786fab6..000000000 --- a/node_modules/express/node_modules/connect/node_modules/uid2/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "uid2", - "description": "strong uid", - "tags": [ - "uid" - ], - "version": "0.0.3", - "dependencies": {}, - "readme": "ERROR: No README data found!", - "_id": "uid2@0.0.3", - "dist": { - "shasum": "d703205c61c80bcdf3b295602c523bd3a312dd6e" - }, - "_from": "uid2@0.0.3", - "_resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz" -} diff --git a/node_modules/express/node_modules/connect/package.json b/node_modules/express/node_modules/connect/package.json deleted file mode 100644 index b3c6240aa..000000000 --- a/node_modules/express/node_modules/connect/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "connect", - "version": "2.12.0", - "description": "High performance middleware framework", - "keywords": [ - "framework", - "web", - "middleware", - "connect", - "rack" - ], - "repository": { - "type": "git", - "url": "git://github.com/senchalabs/connect.git" - }, - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca", - "url": "http://tjholowaychuk.com" - }, - "dependencies": { - "batch": "0.5.0", - "qs": "0.6.6", - "cookie-signature": "1.0.1", - "buffer-crc32": "0.2.1", - "cookie": "0.1.0", - "send": "0.1.4", - "bytes": "0.2.1", - "fresh": "0.2.0", - "pause": "0.0.1", - "uid2": "0.0.3", - "debug": ">= 0.7.3 < 1", - "methods": "0.1.0", - "raw-body": "1.1.2", - "negotiator": "0.3.0", - "multiparty": "2.2.0" - }, - "devDependencies": { - "should": ">= 2.0.2 < 3", - "mocha": ">= 1.13.0 < 2", - "jade": ">= 0.35.0 < 1", - "dox": ">= 0.4.4 < 1" - }, - "licenses": [ - { - "type": "MIT", - "url": "https://raw.github.com/senchalabs/connect/master/LICENSE" - } - ], - "main": "index", - "engines": { - "node": ">= 0.8.0" - }, - "scripts": { - "test": "make" - }, - "readme": "# Connect [![build status](https://secure.travis-ci.org/senchalabs/connect.png)](http://travis-ci.org/senchalabs/connect)\n\n Connect is an extensible HTTP server framework for [node](http://nodejs.org), providing high performance \"plugins\" known as _middleware_.\n\n Connect is bundled with over _20_ commonly used middleware, including\n a logger, session support, cookie parser, and [more](http://senchalabs.github.com/connect). 
Be sure to view the 2.x [documentation](http://senchalabs.github.com/connect/).\n\n```js\nvar connect = require('connect')\n , http = require('http');\n\nvar app = connect()\n .use(connect.favicon())\n .use(connect.logger('dev'))\n .use(connect.static('public'))\n .use(connect.directory('public'))\n .use(connect.cookieParser())\n .use(connect.session({ secret: 'my secret here' }))\n .use(function(req, res){\n res.end('Hello from Connect!\\n');\n });\n\nhttp.createServer(app).listen(3000);\n```\n\n## Middleware\n\n - [basicAuth](http://www.senchalabs.org/connect/basicAuth.html)\n - [bodyParser](http://www.senchalabs.org/connect/bodyParser.html)\n - [compress](http://www.senchalabs.org/connect/compress.html)\n - [cookieParser](http://www.senchalabs.org/connect/cookieParser.html)\n - [cookieSession](http://www.senchalabs.org/connect/cookieSession.html)\n - [csrf](http://www.senchalabs.org/connect/csrf.html)\n - [directory](http://www.senchalabs.org/connect/directory.html)\n - [errorHandler](http://www.senchalabs.org/connect/errorHandler.html)\n - [favicon](http://www.senchalabs.org/connect/favicon.html)\n - [json](http://www.senchalabs.org/connect/json.html)\n - [limit](http://www.senchalabs.org/connect/limit.html)\n - [logger](http://www.senchalabs.org/connect/logger.html)\n - [methodOverride](http://www.senchalabs.org/connect/methodOverride.html)\n - [multipart](http://www.senchalabs.org/connect/multipart.html)\n - [urlencoded](http://www.senchalabs.org/connect/urlencoded.html)\n - [query](http://www.senchalabs.org/connect/query.html)\n - [responseTime](http://www.senchalabs.org/connect/responseTime.html)\n - [session](http://www.senchalabs.org/connect/session.html)\n - [static](http://www.senchalabs.org/connect/static.html)\n - [staticCache](http://www.senchalabs.org/connect/staticCache.html)\n - [subdomains](http://www.senchalabs.org/connect/subdomains.html)\n - [vhost](http://www.senchalabs.org/connect/vhost.html)\n\n## Running Tests\n\nfirst:\n\n $ npm install -d\n\nthen:\n\n $ make test\n\n## Contributors\n\n https://github.com/senchalabs/connect/graphs/contributors\n\n## Node Compatibility\n\n Connect `< 1.x` is compatible with node 0.2.x\n\n\n Connect `1.x` is compatible with node 0.4.x\n\n\n Connect `2.x` is compatible with node 0.6.x\n\n\n Connect (_master_) is compatible with node 0.8.x\n\n## CLA\n\n [http://sencha.com/cla](http://sencha.com/cla)\n\n## License\n\nView the [LICENSE](https://github.com/senchalabs/connect/blob/master/LICENSE) file. 
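Illustrative aside, not part of the patch: the Connect README quoted above chains bundled middleware with `.use()`. A custom middleware is just a `function (req, res, next)`; a minimal sketch under that assumption (the timing log is an arbitrary example, not a bundled middleware).

```js
var connect = require('connect');
var http = require('http');

// Any (req, res, next) function can sit in the Connect middleware chain.
function requestTimer(req, res, next) {
  var start = Date.now();
  res.on('finish', function () {
    // Log method, URL and elapsed time once the response has been sent.
    console.log('%s %s - %dms', req.method, req.url, Date.now() - start);
  });
  next(); // hand off to the next middleware in the chain
}

var app = connect()
  .use(requestTimer)
  .use(function (req, res) {
    res.end('Hello from Connect!\n');
  });

http.createServer(app).listen(3000);
```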
The [Silk](http://www.famfamfam.com/lab/icons/silk/) icons used by the `directory` middleware created by/copyright of [FAMFAMFAM](http://www.famfamfam.com/).\n", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/senchalabs/connect/issues" - }, - "homepage": "https://github.com/senchalabs/connect", - "_id": "connect@2.12.0", - "dist": { - "shasum": "c22b246a17d16d9019266fcedccb80fe3478254a" - }, - "_from": "connect@2.12.0", - "_resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz" -} diff --git a/node_modules/express/node_modules/cookie-signature/package.json b/node_modules/express/node_modules/cookie-signature/package.json deleted file mode 100644 index fb2964988..000000000 --- a/node_modules/express/node_modules/cookie-signature/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "cookie-signature", - "version": "1.0.1", - "description": "Sign and unsign cookies", - "keywords": [ - "cookie", - "sign", - "unsign" - ], - "author": { - "name": "TJ Holowaychuk", - "email": "tj@learnboost.com" - }, - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "main": "index", - "readme": "\n# cookie-signature\n\n Sign and unsign cookies.\n\n## Example\n\n```js\nvar cookie = require('cookie-signature');\n\nvar val = cookie.sign('hello', 'tobiiscool');\nval.should.equal('hello.DGDUkGlIkCzPz+C0B064FNgHdEjox7ch8tOBGslZ5QI');\n\nvar val = cookie.sign('hello', 'tobiiscool');\ncookie.unsign(val, 'tobiiscool').should.equal('hello');\ncookie.unsign(val, 'luna').should.be.false;\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2012 LearnBoost <tj@learnboost.com>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - "readmeFilename": "Readme.md", - "_id": "cookie-signature@1.0.1", - "dist": { - "shasum": "8cb1735bc2445307e195d7acfcefc82a60f57d9c" - }, - "_from": "cookie-signature@1.0.1", - "_resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz" -} diff --git a/node_modules/express/node_modules/cookie/package.json b/node_modules/express/node_modules/cookie/package.json deleted file mode 100644 index 4732a577f..000000000 --- a/node_modules/express/node_modules/cookie/package.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "author": { - "name": "Roman Shtylman", - "email": "shtylman@gmail.com" - }, - "name": "cookie", - "description": "cookie parsing and serialization", - "version": "0.1.0", - "repository": { - "type": "git", - "url": "git://github.com/shtylman/node-cookie.git" - }, - "keywords": [ - "cookie", - "cookies" - ], - 
"main": "index.js", - "scripts": { - "test": "mocha" - }, - "dependencies": {}, - "devDependencies": { - "mocha": "1.x.x" - }, - "optionalDependencies": {}, - "engines": { - "node": "*" - }, - "readme": "# cookie [![Build Status](https://secure.travis-ci.org/shtylman/node-cookie.png?branch=master)](http://travis-ci.org/shtylman/node-cookie) #\n\ncookie is a basic cookie parser and serializer. It doesn't make assumptions about how you are going to deal with your cookies. It basically just provides a way to read and write the HTTP cookie headers.\n\nSee [RFC6265](http://tools.ietf.org/html/rfc6265) for details about the http header for cookies.\n\n## how?\n\n```\nnpm install cookie\n```\n\n```javascript\nvar cookie = require('cookie');\n\nvar hdr = cookie.serialize('foo', 'bar');\n// hdr = 'foo=bar';\n\nvar cookies = cookie.parse('foo=bar; cat=meow; dog=ruff');\n// cookies = { foo: 'bar', cat: 'meow', dog: 'ruff' };\n```\n\n## more\n\nThe serialize function takes a third parameter, an object, to set cookie options. See the RFC for valid values.\n\n### path\n> cookie path\n\n### expires\n> absolute expiration date for the cookie (Date object)\n\n### maxAge\n> relative max age of the cookie from when the client receives it (seconds)\n\n### domain\n> domain for the cookie\n\n### secure\n> true or false\n\n### httpOnly\n> true or false\n\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/shtylman/node-cookie/issues" - }, - "homepage": "https://github.com/shtylman/node-cookie", - "_id": "cookie@0.1.0", - "dist": { - "shasum": "47533ff33776b0b380de876f5dc9702b22b39987" - }, - "_from": "cookie@0.1.0", - "_resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz" -} diff --git a/node_modules/express/node_modules/debug/index.js b/node_modules/express/node_modules/debug/index.js deleted file mode 100644 index e02c13b7f..000000000 --- a/node_modules/express/node_modules/debug/index.js +++ /dev/null @@ -1,5 +0,0 @@ -if ('undefined' == typeof window) { - module.exports = require('./lib/debug'); -} else { - module.exports = require('./debug'); -} diff --git a/node_modules/express/node_modules/debug/package.json b/node_modules/express/node_modules/debug/package.json deleted file mode 100644 index 24abce648..000000000 --- a/node_modules/express/node_modules/debug/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "debug", - "version": "0.7.4", - "repository": { - "type": "git", - "url": "git://github.com/visionmedia/debug.git" - }, - "description": "small debugging utility", - "keywords": [ - "debug", - "log", - "debugger" - ], - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca" - }, - "dependencies": {}, - "devDependencies": { - "mocha": "*" - }, - "main": "lib/debug.js", - "browser": "./debug.js", - "engines": { - "node": "*" - }, - "files": [ - "lib/debug.js", - "debug.js", - "index.js" - ], - "component": { - "scripts": { - "debug/index.js": "index.js", - "debug/debug.js": "debug.js" - } - }, - "readme": "# debug\n\n tiny node.js debugging utility modelled after node core's debugging technique.\n\n## Installation\n\n```\n$ npm install debug\n```\n\n## Usage\n\n With `debug` you simply invoke the exported function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. 
A unique color is selected per-function for visibility.\n \nExample _app.js_:\n\n```js\nvar debug = require('debug')('http')\n , http = require('http')\n , name = 'My App';\n\n// fake app\n\ndebug('booting %s', name);\n\nhttp.createServer(function(req, res){\n debug(req.method + ' ' + req.url);\n res.end('hello\\n');\n}).listen(3000, function(){\n debug('listening');\n});\n\n// fake worker of some kind\n\nrequire('./worker');\n```\n\nExample _worker.js_:\n\n```js\nvar debug = require('debug')('worker');\n\nsetInterval(function(){\n debug('doing some work');\n}, 1000);\n```\n\n The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples:\n\n ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png)\n\n ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png)\n\n## Millisecond diff\n\n When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the \"+NNNms\" will show you how much time was spent between calls.\n\n ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png)\n\n When stderr is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below:\n _(NOTE: Debug now uses stderr instead of stdout, so the correct shell command for this example is actually `DEBUG=* node example/worker 2> out &`)_\n \n ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png)\n \n## Conventions\n\n If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use \":\" to separate features. For example \"bodyParser\" from Connect would then be \"connect:bodyParser\". \n\n## Wildcards\n\n The \"*\" character may be used as a wildcard. Suppose for example your library has debuggers named \"connect:bodyParser\", \"connect:compress\", \"connect:session\", instead of listing all three with `DEBUG=connect:bodyParser,connect.compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`.\n\n You can also exclude specific debuggers by prefixing them with a \"-\" character. For example, `DEBUG=* -connect:*` would include all debuggers except those starting with \"connect:\".\n\n## Browser support\n\n Debug works in the browser as well, currently persisted by `localStorage`. For example if you have `worker:a` and `worker:b` as shown below, and wish to debug both type `debug.enable('worker:*')` in the console and refresh the page, this will remain until you disable with `debug.disable()`. 
\n\n```js\na = debug('worker:a');\nb = debug('worker:b');\n\nsetInterval(function(){\n a('doing some work');\n}, 1000);\n\nsetInterval(function(){\n a('doing some work');\n}, 1200);\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/debug/issues" - }, - "homepage": "https://github.com/visionmedia/debug", - "_id": "debug@0.7.4", - "_from": "debug@>= 0.7.3 < 1" -} diff --git a/node_modules/express/node_modules/fresh/package.json b/node_modules/express/node_modules/fresh/package.json deleted file mode 100644 index 473dc4d69..000000000 --- a/node_modules/express/node_modules/fresh/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "fresh", - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca", - "url": "http://tjholowaychuk.com" - }, - "description": "HTTP response freshness testing", - "version": "0.2.0", - "main": "index.js", - "repository": { - "type": "git", - "url": "https://github.com/visionmedia/node-fresh.git" - }, - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "readme": "\n# node-fresh\n\n HTTP response freshness testing\n\n## fresh(req, res)\n\n Check freshness of `req` and `res` headers.\n\n When the cache is \"fresh\" __true__ is returned,\n otherwise __false__ is returned to indicate that\n the cache is now stale.\n\n## Example:\n\n```js\nvar req = { 'if-none-match': 'tobi' };\nvar res = { 'etag': 'luna' };\nfresh(req, res);\n// => false\n\nvar req = { 'if-none-match': 'tobi' };\nvar res = { 'etag': 'tobi' };\nfresh(req, res);\n// => true\n```\n\n## Installation\n\n```\n$ npm install fresh\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 
TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/node-fresh/issues" - }, - "homepage": "https://github.com/visionmedia/node-fresh", - "_id": "fresh@0.2.0", - "dist": { - "shasum": "a4591b7f0ce1ad3083cb13b06d189a8d0e2c925c" - }, - "_from": "fresh@0.2.0", - "_resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz" -} diff --git a/node_modules/express/node_modules/merge-descriptors/package.json b/node_modules/express/node_modules/merge-descriptors/package.json deleted file mode 100644 index e6325f7b6..000000000 --- a/node_modules/express/node_modules/merge-descriptors/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "merge-descriptors", - "description": "Merge objects using descriptors", - "version": "0.0.1", - "author": { - "name": "Jonathan Ong", - "email": "me@jongleberry.com", - "url": "http://jongleberry.com" - }, - "license": "MIT", - "repository": { - "type": "git", - "url": "https://github.com/jonathanong/merge-descriptors.git" - }, - "bugs": { - "url": "https://github.com/jonathanong/merge-descriptors/issues" - }, - "scripts": { - "test": "make test;" - }, - "readme": "# Merge Descriptors [![Build Status](https://travis-ci.org/jonathanong/merge-descriptors.png)](https://travis-ci.org/jonathanong/merge-descriptors)\n\nMerge objects using descriptors.\n\n```js\nvar thing = {\n get name() {\n return 'jon'\n }\n}\n\nvar animal = {\n\n}\n\nmerge(animal, thing)\n\nanimal.name === 'jon'\n```\n\n## API\n\n### merge(destination, source)\n\nOverwrites `destination`'s descriptors with `source`'s.\n\n## License\n\nThe MIT License (MIT)\n\nCopyright (c) 2013 Jonathan Ong me@jongleberry.com\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
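Illustrative aside, not part of the patch: the node-fresh README quoted a little further up checks request headers against intended response headers to decide whether a cached copy is still valid. A hedged sketch of a conditional GET built on that, assuming fresh 0.2.x; the fixed ETag value is invented for the example.

```js
var http = require('http');
var fresh = require('fresh');

http.createServer(function (req, res) {
  var etag = '"v1"'; // illustrative fixed ETag; a real app would derive this from the resource

  // fresh() compares the request headers (if-none-match / if-modified-since)
  // with the response headers we intend to send.
  if (fresh(req.headers, { 'etag': etag })) {
    res.statusCode = 304; // the client's cached copy is still fresh
    return res.end();
  }

  res.setHeader('ETag', etag);
  res.end('hello\n');
}).listen(3000);
```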
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.", - "readmeFilename": "README.md", - "homepage": "https://github.com/jonathanong/merge-descriptors", - "_id": "merge-descriptors@0.0.1", - "dist": { - "shasum": "c6594e61b3fda36889f00404dfcb32624b8c5a63" - }, - "_from": "merge-descriptors@0.0.1", - "_resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz" -} diff --git a/node_modules/express/node_modules/methods/package.json b/node_modules/express/node_modules/methods/package.json deleted file mode 100644 index 129cbf7b3..000000000 --- a/node_modules/express/node_modules/methods/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "methods", - "version": "0.1.0", - "description": "HTTP methods that node supports", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "keywords": [ - "http", - "methods" - ], - "author": { - "name": "TJ Holowaychuk" - }, - "license": "MIT", - "repository": { - "type": "git", - "url": "git://github.com/visionmedia/node-methods.git" - }, - "readme": "\n# Methods\n\n HTTP verbs that node core's parser supports.\n", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/node-methods/issues" - }, - "homepage": "https://github.com/visionmedia/node-methods", - "_id": "methods@0.1.0", - "dist": { - "shasum": "e0bd5232e34655efe6840453a92a45d4b030719e" - }, - "_from": "methods@0.1.0", - "_resolved": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz" -} diff --git a/node_modules/express/node_modules/mkdirp/package.json b/node_modules/express/node_modules/mkdirp/package.json deleted file mode 100644 index 38b3a4dc1..000000000 --- a/node_modules/express/node_modules/mkdirp/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "mkdirp", - "description": "Recursively mkdir, like `mkdir -p`", - "version": "0.3.5", - "author": { - "name": "James Halliday", - "email": "mail@substack.net", - "url": "http://substack.net" - }, - "main": "./index", - "keywords": [ - "mkdir", - "directory" - ], - "repository": { - "type": "git", - "url": "http://github.com/substack/node-mkdirp.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "devDependencies": { - "tap": "~0.4.0" - }, - "license": "MIT", - "readme": "# mkdirp\n\nLike `mkdir -p`, but in node.js!\n\n[![build status](https://secure.travis-ci.org/substack/node-mkdirp.png)](http://travis-ci.org/substack/node-mkdirp)\n\n# example\n\n## pow.js\n\n```js\nvar mkdirp = require('mkdirp');\n \nmkdirp('/tmp/foo/bar/baz', function (err) {\n if (err) console.error(err)\n else console.log('pow!')\n});\n```\n\nOutput\n\n```\npow!\n```\n\nAnd now /tmp/foo/bar/baz exists, huzzah!\n\n# methods\n\n```js\nvar mkdirp = require('mkdirp');\n```\n\n## mkdirp(dir, mode, cb)\n\nCreate a new directory and any necessary subdirectories at `dir` with octal\npermission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\n`cb(err, made)` fires with the error or the first directory `made`\nthat had to be created, if any.\n\n## mkdirp.sync(dir, mode)\n\nSynchronously create a new directory and any necessary subdirectories at `dir`\nwith octal permission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\nReturns the first directory that had to be 
created, if any.\n\n# install\n\nWith [npm](http://npmjs.org) do:\n\n```\nnpm install mkdirp\n```\n\n# license\n\nMIT\n", - "readmeFilename": "readme.markdown", - "bugs": { - "url": "https://github.com/substack/node-mkdirp/issues" - }, - "homepage": "https://github.com/substack/node-mkdirp", - "_id": "mkdirp@0.3.5", - "dist": { - "shasum": "b83530397e309beac8bb6614fd1d35ebd5c06fc6" - }, - "_from": "mkdirp@0.3.5", - "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" -} diff --git a/node_modules/express/node_modules/range-parser/package.json b/node_modules/express/node_modules/range-parser/package.json deleted file mode 100644 index 276113784..000000000 --- a/node_modules/express/node_modules/range-parser/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "range-parser", - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca", - "url": "http://tjholowaychuk.com" - }, - "description": "Range header field string parser", - "version": "0.0.4", - "main": "index.js", - "dependencies": {}, - "devDependencies": { - "mocha": "*", - "should": "*" - }, - "readme": "\n# node-range-parser\n\n Range header field parser.\n\n## Example:\n\n```js\nassert(-1 == parse(200, 'bytes=500-20'));\nassert(-2 == parse(200, 'bytes=malformed'));\nparse(200, 'bytes=0-499').should.eql(arr('bytes', [{ start: 0, end: 199 }]));\nparse(1000, 'bytes=0-499').should.eql(arr('bytes', [{ start: 0, end: 499 }]));\nparse(1000, 'bytes=40-80').should.eql(arr('bytes', [{ start: 40, end: 80 }]));\nparse(1000, 'bytes=-500').should.eql(arr('bytes', [{ start: 500, end: 999 }]));\nparse(1000, 'bytes=-400').should.eql(arr('bytes', [{ start: 600, end: 999 }]));\nparse(1000, 'bytes=500-').should.eql(arr('bytes', [{ start: 500, end: 999 }]));\nparse(1000, 'bytes=400-').should.eql(arr('bytes', [{ start: 400, end: 999 }]));\nparse(1000, 'bytes=0-0').should.eql(arr('bytes', [{ start: 0, end: 0 }]));\nparse(1000, 'bytes=-1').should.eql(arr('bytes', [{ start: 999, end: 999 }]));\nparse(1000, 'items=0-5').should.eql(arr('items', [{ start: 0, end: 5 }]));\nparse(1000, 'bytes=40-80,-1').should.eql(arr('bytes', [{ start: 40, end: 80 }, { start: 999, end: 999 }]));\n```\n\n## Installation\n\n```\n$ npm install range-parser\n```", - "readmeFilename": "Readme.md", - "_id": "range-parser@0.0.4", - "dist": { - "shasum": "83d826681e378aac53688dca7947bd9f9c96fe77" - }, - "_from": "range-parser@0.0.4", - "_resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz" -} diff --git a/node_modules/express/node_modules/send/node_modules/mime/package.json b/node_modules/express/node_modules/send/node_modules/mime/package.json deleted file mode 100644 index a97593921..000000000 --- a/node_modules/express/node_modules/send/node_modules/mime/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "author": { - "name": "Robert Kieffer", - "email": "robert@broofa.com", - "url": "http://github.com/broofa" - }, - "contributors": [ - { - "name": "Benjamin Thomas", - "email": "benjamin@benjaminthomas.org", - "url": "http://github.com/bentomas" - } - ], - "dependencies": {}, - "description": "A comprehensive library for mime-type mapping", - "devDependencies": {}, - "keywords": [ - "util", - "mime" - ], - "main": "mime.js", - "name": "mime", - "repository": { - "url": "https://github.com/broofa/node-mime", - "type": "git" - }, - "version": "1.2.11", - "readme": "# mime\n\nComprehensive MIME type mapping API. 
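Illustrative aside, not part of the patch: the range-parser README quoted just above returns either an array of `{ start, end }` ranges (with a `type` property) or `-1`/`-2` for unsatisfiable or malformed headers. A minimal sketch of using that result in an HTTP handler; the resource size, response policy, and port are assumptions for illustration.

```js
var http = require('http');
var parseRange = require('range-parser');

http.createServer(function (req, res) {
  var size = 1000; // illustrative resource size in bytes

  if (req.headers.range) {
    var ranges = parseRange(size, req.headers.range);
    if (ranges === -1) {        // unsatisfiable range
      res.statusCode = 416;
      return res.end();
    }
    if (ranges === -2) {        // malformed header: fall through and send the full body
      return res.end('full body\n');
    }
    // ranges is an array of { start, end }; ranges.type is e.g. 'bytes'
    res.statusCode = 206;
    return res.end('serving ' + ranges.type + ' ' + ranges[0].start + '-' + ranges[0].end + '\n');
  }

  res.end('full body\n');
}).listen(3000);
```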
Includes all 600+ types and 800+ extensions defined by the Apache project, plus additional types submitted by the node.js community.\n\n## Install\n\nInstall with [npm](http://github.com/isaacs/npm):\n\n npm install mime\n\n## API - Queries\n\n### mime.lookup(path)\nGet the mime type associated with a file, if no mime type is found `application/octet-stream` is returned. Performs a case-insensitive lookup using the extension in `path` (the substring after the last '/' or '.'). E.g.\n\n var mime = require('mime');\n\n mime.lookup('/path/to/file.txt'); // => 'text/plain'\n mime.lookup('file.txt'); // => 'text/plain'\n mime.lookup('.TXT'); // => 'text/plain'\n mime.lookup('htm'); // => 'text/html'\n\n### mime.default_type\nSets the mime type returned when `mime.lookup` fails to find the extension searched for. (Default is `application/octet-stream`.)\n\n### mime.extension(type)\nGet the default extension for `type`\n\n mime.extension('text/html'); // => 'html'\n mime.extension('application/octet-stream'); // => 'bin'\n\n### mime.charsets.lookup()\n\nMap mime-type to charset\n\n mime.charsets.lookup('text/plain'); // => 'UTF-8'\n\n(The logic for charset lookups is pretty rudimentary. Feel free to suggest improvements.)\n\n## API - Defining Custom Types\n\nThe following APIs allow you to add your own type mappings within your project. If you feel a type should be included as part of node-mime, see [requesting new types](https://github.com/broofa/node-mime/wiki/Requesting-New-Types).\n\n### mime.define()\n\nAdd custom mime/extension mappings\n\n mime.define({\n 'text/x-some-format': ['x-sf', 'x-sft', 'x-sfml'],\n 'application/x-my-type': ['x-mt', 'x-mtt'],\n // etc ...\n });\n\n mime.lookup('x-sft'); // => 'text/x-some-format'\n\nThe first entry in the extensions array is returned by `mime.extension()`. 
E.g.\n\n mime.extension('text/x-some-format'); // => 'x-sf'\n\n### mime.load(filepath)\n\nLoad mappings from an Apache \".types\" format file\n\n mime.load('./my_project.types');\n\nThe .types file format is simple - See the `types` dir for examples.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/broofa/node-mime/issues" - }, - "homepage": "https://github.com/broofa/node-mime", - "_id": "mime@1.2.11", - "_from": "mime@~1.2.9" -} diff --git a/node_modules/express/node_modules/send/package.json b/node_modules/express/node_modules/send/package.json deleted file mode 100644 index 4d7780473..000000000 --- a/node_modules/express/node_modules/send/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "send", - "version": "0.1.4", - "description": "Better streaming static file server with Range and conditional-GET support", - "keywords": [ - "static", - "file", - "server" - ], - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca" - }, - "dependencies": { - "debug": "*", - "mime": "~1.2.9", - "fresh": "0.2.0", - "range-parser": "0.0.4" - }, - "devDependencies": { - "mocha": "*", - "should": "*", - "supertest": "0.0.1", - "connect": "2.x" - }, - "scripts": { - "test": "make test" - }, - "repository": { - "type": "git", - "url": "git://github.com/visionmedia/send.git" - }, - "main": "index", - "readme": "# send\n\n Send is Connect's `static()` extracted for generalized use, a streaming static file\n server supporting partial responses (Ranges), conditional-GET negotiation, high test coverage, and granular events which may be leveraged to take appropriate actions in your application or framework.\n\n## Installation\n\n $ npm install send\n\n## Examples\n\n Small:\n\n```js\nvar http = require('http');\nvar send = require('send');\n\nvar app = http.createServer(function(req, res){\n send(req, req.url).pipe(res);\n}).listen(3000);\n```\n\n Serving from a root directory with custom error-handling:\n\n```js\nvar http = require('http');\nvar send = require('send');\nvar url = require('url');\n\nvar app = http.createServer(function(req, res){\n // your custom error-handling logic:\n function error(err) {\n res.statusCode = err.status || 500;\n res.end(err.message);\n }\n\n // your custom directory handling logic:\n function redirect() {\n res.statusCode = 301;\n res.setHeader('Location', req.url + '/');\n res.end('Redirecting to ' + req.url + '/');\n }\n\n // transfer arbitrary files from within\n // /www/example.com/public/*\n send(req, url.parse(req.url).pathname)\n .root('/www/example.com/public')\n .on('error', error)\n .on('directory', redirect)\n .pipe(res);\n}).listen(3000);\n```\n\n## API\n\n### Events\n\n - `error` an error occurred `(err)`\n - `directory` a directory was requested\n - `file` a file was requested `(path, stat)`\n - `stream` file streaming has started `(stream)`\n - `end` streaming has completed\n\n### .root(dir)\n\n Serve files relative to `path`. 
Aliased as `.from(dir)`.\n\n### .index(path)\n\n By default send supports \"index.html\" files, to disable this\n invoke `.index(false)` or to supply a new index pass a string.\n\n### .maxage(ms)\n\n Provide a max-age in milliseconds for http caching, defaults to 0.\n\n### .hidden(bool)\n\n Enable or disable transfer of hidden files, defaults to false.\n\n## Error-handling\n\n By default when no `error` listeners are present an automatic response will be made, otherwise you have full control over the response, aka you may show a 5xx page etc.\n\n## Caching\n\n It does _not_ perform internal caching, you should use a reverse proxy cache such\n as Varnish for this, or those fancy things called CDNs. If your application is small enough that it would benefit from single-node memory caching, it's small enough that it does not need caching at all ;).\n\n## Debugging\n\n To enable `debug()` instrumentation output export __DEBUG__:\n\n```\n$ DEBUG=send node app\n```\n\n## Running tests\n\n```\n$ npm install\n$ make test\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2012 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/send/issues" - }, - "homepage": "https://github.com/visionmedia/send", - "_id": "send@0.1.4", - "dist": { - "shasum": "a35fd61a4d0cf79096f3ec58358fce38023f2dd8" - }, - "_from": "send@0.1.4", - "_resolved": "https://registry.npmjs.org/send/-/send-0.1.4.tgz" -} diff --git a/node_modules/express/package.json b/node_modules/express/package.json index c845653fa..1b0737df0 100644 --- a/node_modules/express/package.json +++ b/node_modules/express/package.json @@ -1,11 +1,38 @@ { - "name": "express", - "description": "Sinatra inspired web development framework", - "version": "3.4.8", + "_from": "express@3.4.8", + "_id": "express@3.4.8", + "_inBundle": false, + "_integrity": "sha1-qnqJht4HBTM39Lxe2aZFPZzI4uE=", + "_location": "/express", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "express@3.4.8", + "name": "express", + "escapedName": "express", + "rawSpec": "3.4.8", + "saveSpec": null, + "fetchSpec": "3.4.8" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/express/-/express-3.4.8.tgz", + "_shasum": "aa7a8986de07053337f4bc5ed9a6453d9cc8e2e1", + "_spec": "express@3.4.8", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton", "author": { "name": "TJ Holowaychuk", "email": "tj@vision-media.ca" }, + 
"bin": { + "express": "bin/express" + }, + "bugs": { + "url": "https://github.com/visionmedia/express/issues" + }, + "bundleDependencies": false, "contributors": [ { "name": "TJ Holowaychuk", @@ -25,30 +52,36 @@ } ], "dependencies": { - "connect": "2.12.0", + "buffer-crc32": "0.2.1", "commander": "1.3.2", - "range-parser": "0.0.4", - "mkdirp": "0.3.5", + "connect": "2.12.0", "cookie": "0.1.0", - "buffer-crc32": "0.2.1", - "fresh": "0.2.0", - "methods": "0.1.0", - "send": "0.1.4", "cookie-signature": "1.0.1", + "debug": ">= 0.7.3 < 1", + "fresh": "0.2.0", "merge-descriptors": "0.0.1", - "debug": ">= 0.7.3 < 1" + "methods": "0.1.0", + "mkdirp": "0.3.5", + "range-parser": "0.0.4", + "send": "0.1.4" }, + "deprecated": false, + "description": "Sinatra inspired web development framework", "devDependencies": { + "connect-redis": "~1.4.5", "ejs": "~0.8.4", - "mocha": "~1.15.1", - "jade": "~0.30.0", "hjs": "~0.0.6", - "stylus": "~0.40.0", - "should": "~2.1.1", - "connect-redis": "~1.4.5", + "jade": "~0.30.0", "marked": "0.2.10", + "mocha": "~1.15.1", + "should": "~2.1.1", + "stylus": "~0.40.0", "supertest": "~0.8.1" }, + "engines": { + "node": ">= 0.8.0" + }, + "homepage": "https://github.com/visionmedia/express#readme", "keywords": [ "express", "framework", @@ -60,28 +93,16 @@ "app", "api" ], + "license": "MIT", + "main": "index", + "name": "express", "repository": { "type": "git", - "url": "git://github.com/visionmedia/express" - }, - "main": "index", - "bin": { - "express": "./bin/express" + "url": "git://github.com/visionmedia/express.git" }, "scripts": { "prepublish": "npm prune", "test": "make test" }, - "engines": { - "node": ">= 0.8.0" - }, - "license": "MIT", - "readme": "[![express logo](http://f.cl.ly/items/0V2S1n0K1i3y1c122g04/Screen%20Shot%202012-04-11%20at%209.59.42%20AM.png)](http://expressjs.com/)\n\n Fast, unopinionated, minimalist web framework for [node](http://nodejs.org).\n\n [![Build Status](https://secure.travis-ci.org/visionmedia/express.png)](http://travis-ci.org/visionmedia/express) [![Gittip](http://img.shields.io/gittip/visionmedia.png)](https://www.gittip.com/visionmedia/)\n\n```js\nvar express = require('express');\nvar app = express();\n\napp.get('/', function(req, res){\n res.send('Hello World');\n});\n\napp.listen(3000);\n```\n\n## Installation\n\n $ npm install -g express\n\n## Quick Start\n\n The quickest way to get started with express is to utilize the executable `express(1)` to generate an application as shown below:\n\n Create the app:\n\n $ npm install -g express\n $ express /tmp/foo && cd /tmp/foo\n\n Install dependencies:\n\n $ npm install\n\n Start the server:\n\n $ node app\n\n## Features\n\n * Built on [Connect](http://github.com/senchalabs/connect)\n * Robust routing\n * HTTP helpers (redirection, caching, etc)\n * View system supporting 14+ template engines\n * Content negotiation\n * Focus on high performance\n * Environment based configuration\n * Executable for generating applications quickly\n * High test coverage\n\n## Philosophy\n\n The Express philosophy is to provide small, robust tooling for HTTP servers, making\n it a great solution for single page applications, web sites, hybrids, or public\n HTTP APIs.\n\n Built on Connect, you can use _only_ what you need, and nothing more. Applications\n can be as big or as small as you like, even a single file. Express does\n not force you to use any specific ORM or template engine. 
With support for over\n 14 template engines via [Consolidate.js](http://github.com/visionmedia/consolidate.js),\n you can quickly craft your perfect framework.\n\n## More Information\n\n * [Website and Documentation](http://expressjs.com/) stored at [visionmedia/expressjs.com](https://github.com/visionmedia/expressjs.com)\n * Join #express on freenode\n * [Google Group](http://groups.google.com/group/express-js) for discussion\n * Follow [tjholowaychuk](http://twitter.com/tjholowaychuk) on twitter for updates\n * Visit the [Wiki](http://github.com/visionmedia/express/wiki)\n * [Русскоязычная документация](http://jsman.ru/express/)\n * Run express examples [online](https://runnable.com/express)\n\n## Viewing Examples\n\nClone the Express repo, then install the dev dependencies to install all the example / test suite dependencies:\n\n $ git clone git://github.com/visionmedia/express.git --depth 1\n $ cd express\n $ npm install\n\nThen run whichever tests you want:\n\n $ node examples/content-negotiation\n\nYou can also view live examples here:\n\n
\n\n## Running Tests\n\nTo run the test suite, first invoke the following command within the repo, installing the development dependencies:\n\n $ npm install\n\nThen run the tests:\n\n $ make test\n\n## Contributors\n\n https://github.com/visionmedia/express/graphs/contributors\n\n## License\n\n(The MIT License)\n\nCopyright (c) 2009-2012 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "Readme.md", - "bugs": { - "url": "https://github.com/visionmedia/express/issues" - }, - "homepage": "https://github.com/visionmedia/express", - "_id": "express@3.4.8", - "_from": "express@3.x" + "version": "3.4.8" } diff --git a/node_modules/express3-handlebars/.npmignore 2 b/node_modules/express3-handlebars/.npmignore 2 new file mode 100644 index 000000000..1ca957177 --- /dev/null +++ b/node_modules/express3-handlebars/.npmignore 2 @@ -0,0 +1,2 @@ +node_modules/ +npm-debug.log diff --git a/node_modules/express3-handlebars/HISTORY 2.md b/node_modules/express3-handlebars/HISTORY 2.md new file mode 100644 index 000000000..dffabcceb --- /dev/null +++ b/node_modules/express3-handlebars/HISTORY 2.md @@ -0,0 +1,149 @@ +Express3 Handlebars Change History +================================== + +0.5.0 (2013-07-25) +------------------ + +* Added `loadTemplates()` method which will load all the templates in a + specified directory. + +* Added support for multiple partials directories. This enables the + `partialsDir` configuration property to be specified as an *array* of + directories, and loads all of the templates in each one. + + This feature allows an app's partials to be split up in multiple directories, + which is common if an app has some shared partials which will also be exposed + to the client, and some server-side-only partials. + +* Added runnable code examples in this package's "examples/" directory. + +* Improved optional argument handling in public methods to treat Express + `locals` function objects as `options` and not `callback` params to the method + being invoked. + + +0.4.1 (2013-04-06) +------------------ + +* Updated `async` dependency to the latest stable minor version: "~0.2". + + +0.4.0 (2013-03-24) +------------------ + +* (!) Removed the following "get" -> "load" aliases which kept in v0.2.0 for + back-compat: + + * `getPartials()` -> `loadPartials()` + * `getTemplate()` -> `loadTemplate()` + + This is the future version where these aliases have been removed. + +* (!) Renamed `lib/express3-handlebars.js` -> `lib/express-handlebars.js`. 
+ +* Exposed `getHandlebarsSemver()` function as a static property on the + `ExpressHandlebars` constructor. + +* Rearranged module exports by moving the engine factory function to `index.js`, + making the `lib/express3-handlebars.js` module only responsible for exporting + the `ExpressHandlebars` constructor. + + +0.3.3 (2013-03-22) +------------------ + +* Updated internal `_resolveLayoutPath()` method to take the full + `options`/locals objects which the view is rendered with. This makes it easier + to override. (Issue #14) + + +0.3.2 (2013-02-20) +------------------ + +* Transfered ownership and copyright to Yahoo! Inc. This software is still free + to use, and is now licensed under the Yahoo! Inc. BSD license. + + +0.3.1 (2013-02-18) +------------------ + +* Updated README with info about `options.helpers` for `render()` and + `renderView()` docs. + + +0.3.0 (2013-02-18) +------------------ + +* Added support for render-level helpers, via `options.helpers`, to the + `render()` and `renderView()` methods. Handlebars' `registerHelper()` function + now works as expected and does not have to be called before the + `ExpressHandlebars` instance is created. Helpers are now merged from: + `handlebars.helpers` (global), `helpers` (instance), and `options.helpers` + (render-level) before a template is rendered; this provides flexibility at + all levels. + +* Added `handlebarsVersion` property which is the version number of `handlebars` + as a semver. This is used internally to branch on certain operations which + differ between Handlebars releases. + + +0.2.3 (2013-02-13) +------------------ + +* Fixed issue with naming nested partials when using the latest version of + Handlebars (1.0.rc.2). Previous versions require a hack to replace "/"s with + "."s in partial names, and the latest version of Handlebars fixes that bug. + This hack will only be applied to old versions of Handlebars. (Issue #9) + + +0.2.2 (2013-02-04) +------------------ + +* Updated README with the public method renames which happened v0.2.0. + + +0.2.1 (2013-02-04) +------------------ + +* `extname`, `layoutsDir`, and `partialsDir` property values will now reference + the values on the prototype unless an `ExpressHandlebars` instance is + constructed with config values for these properties. + +* Improved clarity of method implementations, and exposed more override "hooks" + via new private methods: `_getPartialName()`, `_renderTemplate()`, and + `_resolveLayoutPath()`. + + +0.2.0 (2013-02-01) +------------------ + +* (!) Renamed methods prefixed with "get" to "load" for clarity: + + * `getPartials()` -> `loadPartials()` + * `getTemplate()` -> `loadTemplate()` + + Aliases for these methods have been created to maintain back-compat, but the + old method names are now deprecated will be removed in the future. (Issue #5) + +* All paths are resolved before checking in or adding to caches. (Issue #1) + +* Force `{precompiled: false}` option within `render()` and `renderView()` + methods to prevent trying to render with precompiled templates. (Issue #2) + + +0.1.2 (2013-01-10) +------------------ + +* Tweaked formatting of README documentation. + + +0.1.1 (2013-01-10) +------------------ + +* Added README documentation. + + +0.1.0 (2013-01-07) +------------------ + +* Initial release. diff --git a/node_modules/express3-handlebars/LICENSE 2 b/node_modules/express3-handlebars/LICENSE 2 new file mode 100644 index 000000000..6fd95450c --- /dev/null +++ b/node_modules/express3-handlebars/LICENSE 2 @@ -0,0 +1,27 @@ +Copyright 2013 Yahoo! 
Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the Yahoo! Inc. nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL YAHOO! INC. BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/express3-handlebars/README 2.md b/node_modules/express3-handlebars/README 2.md new file mode 100644 index 000000000..357896e71 --- /dev/null +++ b/node_modules/express3-handlebars/README 2.md @@ -0,0 +1,612 @@ +Express3 Handlebars +=================== + +A [Handlebars][] view engine for [Express][] which doesn't suck. + +[![Dependency Status](https://david-dm.org/ericf/express3-handlebars.png)][status] + + +[Express]: https://github.com/visionmedia/express +[Handlebars]: https://github.com/wycats/handlebars.js +[status]: https://david-dm.org/ericf/express3-handlebars + + +Goals & Design +-------------- + +I created this project out of frustration with the existing Handlebars view +engines for Express. As of version 3.x, Express got out of the business of being +a generic view engine — this was a great decision — leaving developers to +implement the concepts of layouts, partials, and doing file I/O for their +template engines of choice. + +### Goals and Features + +After building a half-dozen Express apps, I developed requirements and opinions +about what a Handlebars view engine should provide and how it should be +implemented. The following is that list: + +* Add back the concept of "layout", which was removed in Express 3.x. + +* Add back the concept of "partials" via Handlebars' partials mechanism. + +* Support a directory of partials; e.g., `{{> foo/bar}}` which exists on the + file system at `views/partials/foo/bar.handlebars` by default. + +* Smart file system I/O and template caching. When in development, templates are + always loaded from disk. In production, raw files and compiled templates are + cached, including partials. + +* All async and non-blocking. File system I/O is slow and servers should not be + blocked from handling requests while reading from disk. I/O queuing is used to + avoid doing unnecessary work. + +* Ability to expose precompiled templates and partials to the client, enabling + template sharing and reuse. 
+ +* Ability to use a different Handlebars module/implementation other than the + Handlebars npm package. + +### Package Design + +This package was designed to work great for both the simple and complex use +cases. I _intentionally_ made sure the full implementation is exposed and is +easily overrideable. + +The package exports a function which can be invoked with no arguments or with a +`config` object and it will return a function (closed over sane defaults) which +can be registered with an Express app. It's an engine factory function. + +This exported engine factory has two properties which expose the underlying +implementation: + +* `ExpressHandlebars()`: The constructor function which holds the internal + implementation on its `prototype`. This produces instance objects which store + their configuration, `compiled` and `precompiled` templates, and expose an + `engine()` function which can be registered with an Express app. + +* `create()`: A convenience factory function for creating `ExpressHandlebars` + instances. + +An instance-based approach is used so that multiple `ExpressHandlebars` +instances can be created with their own configuration, templates, partials, and +helpers. + + +Installation +------------ + +Install using npm: + +```shell +$ npm install express3-handlebars +``` + + +Usage +----- + +This view engine uses sane defaults that leverage the "Express-way" of +structuring an app's views. This makes it trivial to use in basic apps: + +### Basic Usage + +**Directory Structure:** + +``` +. +├── app.js +└── views + ├── home.handlebars + └── layouts + └── main.handlebars + +2 directories, 3 files +``` + +**app.js:** + +Creates a super simple Express app which shows the basic way to register a +Handlebars view engine using this package. + +```javascript +var express = require('express'), + exphbs = require('express3-handlebars'), + + app = express(); + +app.engine('handlebars', exphbs({defaultLayout: 'main'})); +app.set('view engine', 'handlebars'); + +app.get('/', function (req, res) { + res.render('home'); +}); + +app.listen(3000); +``` + +**views/layouts/main.handlebars:** + +The main layout is the HTML page wrapper which can be reused for the different +views of the app. `{{{body}}}` is used as a placeholder for where the main +content should be rendered. + +```html + + + + + Example App + + + + {{{body}}} + + + +``` + +**views/home.handlebars:** + +The content for the app's home view which will be rendered into the layout's +`{{{body}}}`. + +```html +

+<h1>Example App: Home</h1>

+``` + +#### Running the Example + +The above example is bundled in this package's [examples directory][], where +it can be run by: + +```shell +$ cd examples/basic/ && node app +``` + +### Using Instances + +Another way to use this view engine is to create an instance(s) of +`ExpressHandlebars`, allowing access to the full API: + +```javascript +var express = require('express'), + exphbs = require('express3-handlebars'), + + app = express(), + hbs = exphbs.create({ /* config */ }); + +// Register `hbs.engine` with the Express app. +app.engine('handlebars', hbs.engine); +app.set('view engine', 'handlebars'); + +// ...still have a reference to `hbs`, on which methods like `loadPartials()` +// can be called. +``` + +**Note:** The [Advanced Usage][] example demonstrates how `ExpressHandlebars` +instances can be leveraged. + +### Template Caching + +This view engine uses a smart template caching strategy. In development, +templates will always be loaded from disk, i.e., no caching. In production, raw +files and compiled Handlebars templates are aggressively cached. + +The easiest way to control template/view caching is through Express' +[view cache setting][]: + +```javascript +app.enable('view cache'); +``` + +Express enables this setting by default when in production mode, i.e., +`process.env.NODE_ENV === "production"`. + +**Note:** All of the public API methods accept `options.cache`, which gives +control over caching when calling these methods directly. + +### Layouts + +A layout is simply a Handlebars template with a `{{{body}}}` placeholder. +Usually it will be an HTML page wrapper into which views will be rendered. + +This view engine adds back the concept of "layout", which was removed in Express +3.x. It can be configured with a path to the layouts directory, by default it's +set to `"views/layouts/"`. + +There are two ways to set a default layout: configuring the view engine's +`defaultLayout` property, or setting [Express locals][] `app.locals.layout`. + +The layout into which a view should be rendered can be overridden per-request +by assigning a different value to the `layout` request local. The following +will render the "home" view with no layout: + +```javascript +app.get('/', function (req, res, next) { + res.render('home', {layout: false}); +}); +``` + +### Helpers + +Helper functions, or "helpers" are functions that can be +[registered with Handlebars][] and can be called within a template. Helpers can +be used for transforming output, iterating over data, etc. To keep with the +spirit of *logic-less* templates, helpers are the place where logic should be +defined. + +Handlebars ships with some [built-in helpers][], such as: `with`, `if`, `each`, +etc. Most application will need to extend this set of helpers to include +app-specific logic and transformations. Beyond defining global helpers on +`Handlebars`, this view engine supports `ExpressHandlebars` instance-level +helpers via the `helpers` configuration property, and render-level helpers via +`options.helpers` when calling the `render()` and `renderView()` methods. + +The following example shows helpers being specified at each level: + +**app.js:** + +Creates a super simple Express app which shows the basic way to register +`ExpressHandlebars` instance-level helpers, and override one at the +render-level. + +```javascript +var express = require('express'), + exphbs = require('express3-handlebars'), + + app = express(), + hbs; + +hbs = exphbs.create({ + // Specify helpers which are only registered on this instance. 
+    helpers: {
+        foo: function () { return 'FOO!'; },
+        bar: function () { return 'BAR!'; }
+    }
+});
+
+app.engine('handlebars', hbs.engine);
+app.set('view engine', 'handlebars');
+
+app.get('/', function (req, res, next) {
+    res.render('home', {
+        showTitle: true,
+
+        // Override `foo` helper only for this rendering.
+        helpers: {
+            foo: function () { return 'foo.'; }
+        }
+    });
+});
+
+app.listen(3000);
+```
+
+**views/home.handlebars:**
+
+The app's home view which uses helper functions to help render the contents.
+
+```html
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8" />
+    <title>Example App - Home</title>
+</head>
+<body>
+
+    {{#if showTitle}}

+    <h1>Home</h1>

+ {{/if}} + + +

+    <p>{{foo}}</p>

+ + +

+    <p>{{bar}}</p>

+ + + +``` + +#### More on Helpers + +Refer to the [Handlebars website][] for more information on defining helpers: + +* [Expression Helpers][] +* [Block Helpers][] + + +[examples directory]: https://github.com/ericf/express3-handlebars/tree/master/examples +[view cache setting]: http://expressjs.com/api.html#app-settings +[Express locals]: http://expressjs.com/api.html#app.locals +[registered with Handlebars]: https://github.com/wycats/handlebars.js/#registering-helpers +[built-in helpers]: http://handlebarsjs.com/#builtins +[Handlebars website]: http://handlebarsjs.com/ +[Expression Helpers]: http://handlebarsjs.com/expressions.html#helpers +[Block Helpers]: http://handlebarsjs.com/block_helpers.html + + +API +--- + +### Configuration and Defaults + +There are two main ways to use this package: via its engine factory function, or +creating `ExpressHandlebars` instances; both use the same configuration +properties and defaults. + +```javascript +var exphbs = require('express3-handlebars'); + +// Using the engine factory: +exphbs({ /* config */ }); + +// Create an instance: +exphbs.create({ /* config */ }); +``` + +The following is the list of configuration properties and their default values +(if any): + +#### `defaultLayout` +The string name or path of a template in the `layoutsDir` to use as the default +layout. This is overridden by a `layout` specified in the app or response +`locals`. **Note:** A falsy value will render without a layout; e.g., +`res.render('home', {layout: false});`. + +#### `extname=".handlebars"` +The string name of the file extension used by the templates. + +#### `handlebars=require('handlebars')` +The Handlebars module/implementation. This allows for the `ExpressHandlebars` +instance to use a different Handlebars module/implementation than that provided +by the Handlebars npm package. + +#### `helpers` +An object which holds the helper functions used when rendering templates with +this `ExpressHandlebars` instance. When rendering a template, a collection of +helpers will be generated by merging: `handlebars.helpers` (global), `helpers` +(instance), and `options.helpers` (render-level). This allows Handlebars' +`registerHelper()` function to operate as expected, will providing two extra +levels over helper overrides. + +#### `layoutsDir="views/layouts/"` +The string path to the directory where the layout templates reside. + +#### `partialsDir="views/partials/"` +The string path to the directory where the partials templates reside. + +### Properties + +The public API properties are provided via `ExpressHandlebars` instances. In +additional to the properties listed in the **Configuration and Defaults** +section, the following are additional public properties: + +#### `compiled` +An object cache which holds compiled Handlebars template functions in the +format: `{"path/to/template": [Function]}`. + +#### `engine` +A function reference to the `renderView()` method which is bound to `this` +`ExpressHandlebars` instance. This bound function should be used when +registering this view engine with an Express app. + +#### `handlebarsVersion` +The version number of `handlebars` as a semver. This is unsed internally to +branch on certain operations which differ between Handlebars releases. + +#### `precompiled` +An object cache which holds precompiled Handlebars template strings in the +format: `{"path/to/template": [String]}`. 
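+
+For instance, the configuration properties listed above can be combined when
+creating an instance (a minimal sketch; the `shout` helper and the ".hbs"
+extension are only illustrative):
+
+```javascript
+var exphbs = require('express3-handlebars');
+
+var hbs = exphbs.create({
+    defaultLayout: 'main',           // resolves to "views/layouts/main.hbs"
+    extname      : '.hbs',           // use ".hbs" instead of ".handlebars"
+    layoutsDir   : 'views/layouts/',
+    partialsDir  : 'views/partials/',
+    helpers      : {
+        shout: function (msg) { return String(msg).toUpperCase(); }
+    }
+});
+
+// `hbs.engine` is `renderView()` bound to this instance and can be registered
+// with an Express app via `app.engine('hbs', hbs.engine)`.
+```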
+ +### Methods + +The following is the list of public API methods provided via `ExpressHandlebars` +instances: + +#### `loadPartials(options|callback, [callback])` + +Retrieves the partials in the `partialsDir` and passes an object mapping the +partials in the form `{name: partial}` to the `callback`. + +By default each partial will be a compiled Handlebars template function. Use +`options.precompiled` to receive the partials as precompiled templates — this is +useful for sharing templates with client code. + +**Parameters:** + +* `[options]`: Optional object containing any of the following properties: + + * `[cache]`: Whether cached templates can be used if they have already been + requested. This is recommended for production to avoid unnecessary file I/O. + + * `[precompiled=false]`: Whether precompiled templates should be provided, + instead of compiled Handlebars template functions. + +* `callback`: Function to call once the partials are retrieved. + +The name of each partial corresponds to its location in `partialsDir`. For +example, consider the following directory structure: + +``` +views +└── partials + ├── foo + │   └── bar.handlebars + └── title.handlebars + +2 directories, 2 files +``` + +`loadPartials()` would produce the following result: + +```javascript +var hbs = require('express3-handlebars').create(); + +hbs.loadPartials(function (err, partials) { + console.log(partials); + // => { 'foo.bar': [Function], + // => title: [Function] } +}); +``` + +**Note:** The partial name `"foo.bar"` would ideally be `"foo/bar"`, but this is +being prevented by a [Handlebars bug][]. Once this bug is fixed, a future +version will use a "/" separator. Templates requiring the partial still use: +`{{> foo/bar}}`. + +#### `loadTemplate(filePath, options|callback, [callback])` + +Retrieves the template at the specified `filePath` and passes a compiled +Handlebars template function to the `callback`. + +Use `options.precompiled` to receive a precompiled Handlebars template. + +**Parameters:** + +* `filePath`: String path to the Handlebars template file. + +* `[options]`: Optional object containing any of the following properties: + + * `[cache]`: Whether a cached template can be used if it have already been + requested. This is recommended for production to avoid necessary file I/O. + + * `[precompiled=false]`: Whether a precompiled template should be provided, + instead of a compiled Handlebars template function. + +* `callback`: Function to call once the template is retrieved. + +#### `loadTemplates(dirPath, options|callback, [callback])` + +Retrieves the all the templates in the specified `dirPath` and passes an object +mapping the compiled templates in the form `{filename: template}` to the +`callback`. + +Use `options.precompiled` to receive precompiled Handlebars templates — this is +useful for sharing templates with client code. + +**Parameters:** + +* `dirPath`: String path to the directory containing Handlebars template files. + +* `[options]`: Optional object containing any of the following properties: + + * `[cache]`: Whether cached templates can be used if it have already been + requested. This is recommended for production to avoid necessary file I/O. + + * `[precompiled=false]`: Whether precompiled templates should be provided, + instead of a compiled Handlebars template function. + +* `callback`: Function to call once the templates are retrieved. 
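+
+As a rough sketch of how the loaders above fit together (the directory and
+file paths here are only examples):
+
+```javascript
+var hbs = require('express3-handlebars').create();
+
+// Load every template in a directory as *precompiled* template strings, e.g.
+// to ship them to the client.
+hbs.loadTemplates('shared/templates/', {precompiled: true}, function (err, templates) {
+    if (err) { return console.error(err); }
+    console.log(Object.keys(templates)); // => array of "*.handlebars" file names
+});
+
+// Load a single template as a compiled function and render it directly.
+hbs.loadTemplate('views/partials/title.handlebars', function (err, template) {
+    if (err) { return console.error(err); }
+    console.log(template({title: 'Home'}));
+});
+```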
+ +#### `render(filePath, options|callback, [callback])` + +Renders the template at the specified `filePath` using this instance's `helpers` +and partials, and passes the resulting string to the `callback`. + +The `options` will be used both as the context in which the Handlebars template +is rendered, and to signal this view engine on how it should behave, e.g., +`options.cache = false` will load _always_ load the templates from disk. + +**Parameters:** + +* `filePath`: String path to the Handlebars template file. + +* `[options]`: Optional object which will serve as the context in which the + Handlebars template is rendered. It may also contain any of the following + properties which affect this view engine's behavior: + + * `[cache]`: Whether a cached template can be used if it have already been + requested. This is recommended for production to avoid unnecessary file I/O. + + * `[helpers]`: Render-level helpers should be merged with (and will override) + instance and global helper functions. + +* `callback`: Function to call once the template is retrieved. + +#### `renderView(viewPath, options|callback, [callback])` + +Renders the template at the specified `viewPath` as the `{{{body}}}` within the +layout specified by the `defaultLayout` or `options.layout`. Rendering will use +this instance's `helpers` and partials, and passes the resulting string to the +`callback`. + +This method is called by Express and is the main entry point into this Express +view engine implementation. It adds the concept of a "layout" and delegates +rendering to the `render()` method. + +The `options` will be used both as the context in which the Handlebars templates +are rendered, and to signal this view engine on how it should behave, e.g., +`options.cache=false` will load _always_ load the templates from disk. + +**Parameters:** + +* `viewPath`: String path to the Handlebars template file which should serve as + the `{{{body}}}` when using a layout. + +* `[options]`: Optional object which will serve as the context in which the + Handlebars templates are rendered. It may also contain any of the following + properties which affect this view engine's behavior: + + * `[cache]`: Whether cached templates can be used if they have already been + requested. This is recommended for production to avoid unnecessary file I/O. + + * `[helpers]`: Render-level helpers should be merged with (and will override) + instance and global helper functions. + + * `[layout]`: Optional string path to the Handlebars template file to be used + as the "layout". This overrides any `defaultLayout` value. Passing a falsy + value will render with no layout (even if a `defaultLayout` is defined). + +* `callback`: Function to call once the template is retrieved. + +### Statics + +The following is the list of static API properties and methods provided on the +`ExpressHandlebars` constructor: + +#### `getHandlebarsSemver(handlebars)` + +Returns a semver-compatible version string for the specified `handlebars` +module/implementation. + +This utility function is used to compute the value for an `ExpressHandlebars` +instance's `handlebarsVersion` property. + + +[Handlebars bug]: https://github.com/wycats/handlebars.js/pull/389 + + +Examples +-------- + +### [Basic Usage][] + +This example shows the most basic way to use this view engine. + +### [Advanced Usage][] + +This example is more comprehensive and shows how to use many of the features of +this view engine, including helpers, partials, multiple layouts, etc. 
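+
+Many of those features boil down to the `render()`/`renderView()` options
+documented above; as a rough standalone sketch (the view and layout file names
+are only illustrative), `renderView()` can also be driven directly:
+
+```javascript
+var exphbs = require('express3-handlebars'),
+    hbs    = exphbs.create({defaultLayout: 'main'});
+
+// Render "views/home.handlebars" into the "main" layout, overriding the `foo`
+// helper for this call only.
+hbs.renderView('views/home.handlebars', {
+    showTitle: true,
+    helpers  : {foo: function () { return 'foo.'; }}
+}, function (err, html) {
+    if (err) { throw err; }
+    console.log(html);
+});
+```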
+ +As noted in the **Package Design** section, this view engine's implementation is +instance-based, and more advanced usages can take advantage of this. The +Advanced Usage example demonstrates how to use an `ExpressHandlebars` instance +to share templates with the client, among other features. + + +[Basic Usage]: https://github.com/ericf/express3-handlebars/tree/master/examples/basic +[Advanced Usage]: https://github.com/ericf/express3-handlebars/tree/master/examples/advanced + + +License +------- + +This software is free to use under the Yahoo! Inc. BSD license. +See the [LICENSE file][] for license text and copyright information. + + +[LICENSE file]: https://github.com/ericf/express3-handlebars/blob/master/LICENSE diff --git a/node_modules/express3-handlebars/examples/advanced/app 2.js b/node_modules/express3-handlebars/examples/advanced/app 2.js new file mode 100644 index 000000000..2b7d8ae9f --- /dev/null +++ b/node_modules/express3-handlebars/examples/advanced/app 2.js @@ -0,0 +1,99 @@ +var express = require('express'), + exphbs = require('../../'), // "express3-handlebars" + helpers = require('./lib/helpers'), + + app = express(), + hbs; + +// Create `ExpressHandlebars` instance with a default layout. +hbs = exphbs.create({ + defaultLayout: 'main', + helpers : helpers, + + // Uses multiple partials dirs, templates in "shared/templates/" are shared + // with the client-side of the app (see below). + partialsDir: [ + 'shared/templates/', + 'views/partials/' + ] +}); + +// Register `hbs` as our view engine using its bound `engine()` function. +app.engine('handlebars', hbs.engine); +app.set('view engine', 'handlebars'); + +// Middleware to expose the app's shared templates to the cliet-side of the app +// for pages which need them. +function exposeTemplates(req, res, next) { + // Uses the `ExpressHandlebars` instance to get the get the **precompiled** + // templates which will be shared with the client-side of the app. + hbs.loadTemplates('shared/templates/', { + cache : app.enabled('view cache'), + precompiled: true + }, function (err, templates) { + if (err) { return next(err); } + + // RegExp to remove the ".handlebars" extension from the template names. + var extRegex = new RegExp(hbs.extname + '$'); + + // Creates an array of templates which are exposed via + // `res.locals.templates`. + templates = Object.keys(templates).map(function (name) { + return { + name : name.replace(extRegex, ''), + template: templates[name] + }; + }); + + // Exposes the templates during view rendering. + if (templates.length) { + res.locals.templates = templates; + } + + next(); + }); +} + +app.get('/', function (req, res) { + res.render('home', { + title: 'Home' + }); +}); + +app.get('/yell', function (req, res) { + res.render('yell', { + title: 'Yell', + + // This `message` will be transformed by our `yell()` helper. + message: 'hello world' + }); +}); + +app.get('/exclaim', function (req, res) { + res.render('yell', { + title : 'Exclaim', + message: 'hello world', + + // This overrides _only_ the default `yell()` helper. + helpers: { + yell: function (msg) { + return (msg + '!!!'); + } + } + }); +}); + +app.get('/echo/:message?', exposeTemplates, function (req, res) { + res.render('echo', { + title : 'Echo', + message: req.params.message, + + // Overrides which layout to use, instead of the defaul "main" layout. 
+ layout: 'shared-templates' + }); +}); + +app.use(express.static('public/')); +app.listen(3000); + +console.log('express3-handlebars example server listening on: 3000'); diff --git a/node_modules/express3-handlebars/index 2.js b/node_modules/express3-handlebars/index 2.js new file mode 100644 index 000000000..5546d2165 --- /dev/null +++ b/node_modules/express3-handlebars/index 2.js @@ -0,0 +1,13 @@ +var ExpressHandlebars = require('./lib/express-handlebars'); + +function exphbs(config) { + return exphbs.create(config).engine; +} + +exphbs.create = function (config) { + return new ExpressHandlebars(config); +}; + +exphbs.ExpressHandlebars = ExpressHandlebars; + +module.exports = exphbs; diff --git a/node_modules/express3-handlebars/lib/express-handlebars 2.js b/node_modules/express3-handlebars/lib/express-handlebars 2.js new file mode 100644 index 000000000..9d966eb9a --- /dev/null +++ b/node_modules/express3-handlebars/lib/express-handlebars 2.js @@ -0,0 +1,367 @@ +var async = require('async'), + fs = require('fs'), + glob = require('glob'), + path = require('path'), + semver = require('semver'); + +// -- Utilites ----------------------------------------------------------------- + +function extend(obj) { + Array.prototype.slice.call(arguments, 1).forEach(function (source) { + if (!source) { return; } + + for (var key in source) { + obj[key] = source[key]; + } + }); + + return obj; +} + +// -- Constructor -------------------------------------------------------------- + +function ExpressHandlebars(config) { + config || (config = {}); + + var handlebars = config.handlebars || require('handlebars'); + + if ('extname' in config) { this.extname = config.extname; } + if ('layoutsDir' in config) { this.layoutsDir = config.layoutsDir; } + if ('partialsDir' in config) { this.partialsDir = config.partialsDir; } + + this.defaultLayout = config.defaultLayout; + this.handlebars = handlebars; + this.helpers = config.helpers; + + this.handlebarsVersion = ExpressHandlebars.getHandlebarsSemver(handlebars); + + this.compiled = {}; + this.precompiled = {}; + + this.engine = this.renderView.bind(this); +} + +// -- Statics ------------------------------------------------------------------ + +ExpressHandlebars._dirCache = {}; +ExpressHandlebars._fileCache = {}; +ExpressHandlebars._pendingReads = {}; + +ExpressHandlebars.getHandlebarsSemver = function (handlebars) { + var version = handlebars.VERSION || ''; + + // Makes sure the Handlebars version is a valid semver. 
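+    // (Handlebars 1.0 release candidates report e.g. "1.0.rc.2", which is not
+    // valid semver; the replace below coerces it to "1.0.0-rc.2".)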
+ if (version && !semver.valid(version)) { + version = version.replace(/(\d\.\d)\.(\D.*)/, '$1.0-$2'); + } + + return version; +}; + +// -- Prototype ---------------------------------------------------------------- + +extend(ExpressHandlebars.prototype, { + // -- Public Properties ---------------------------------------------------- + + extname : '.handlebars', + layoutsDir : 'views/layouts/', + partialsDir: 'views/partials/', + + // -- Public Methods ------------------------------------------------------- + + loadPartials: function (options, callback) { + if (arguments.length < 2 && typeof options === 'function') { + callback = options; + options = {}; + } + + options || (options = {}); + + function load(dirs, options, callback) { + Array.isArray(dirs) || (dirs = [dirs]); + var loadTemplates = this.loadTemplates.bind(this); + + async.map(dirs, function (dir, callback) { + loadTemplates(dir, options, callback); + }, callback); + } + + function mapPartials(dirs, callback) { + var getPartialName = this._getPartialName.bind(this), + partials; + + partials = dirs.reduce(function (partials, templates) { + Object.keys(templates).forEach(function (filePath) { + partials[getPartialName(filePath)] = templates[filePath]; + }); + + return partials; + }, {}); + + callback(null, partials); + } + + async.waterfall([ + load.bind(this, this.partialsDir, options), + mapPartials.bind(this) + ], callback); + }, + + loadTemplate: function (filePath, options, callback) { + filePath = path.resolve(filePath); + + if (arguments.length < 3 && typeof options === 'function') { + callback = options; + options = {}; + } + + options || (options = {}); + + var precompiled = options.precompiled, + cache = precompiled ? this.precompiled : this.compiled, + template = options.cache && cache[filePath], + compile; + + if (template) { + callback(null, template); + return; + } + + compile = this.handlebars[precompiled ? 
'precompile' : 'compile']; + + this._loadFile(filePath, options, function (err, file) { + if (err) { return callback(err); } + + try { + template = cache[filePath] = compile(file); + callback(null, template); + } catch (ex) { + callback(ex); + } + }); + }, + + loadTemplates: function (dirPath, options, callback) { + if (arguments.length < 3 && typeof options === 'function') { + callback = options; + options = {}; + } + + options || (options = {}); + + function load(filePath, callback) { + this.loadTemplate(path.join(dirPath, filePath), options, callback); + } + + function mapTemplates(filePaths, callback) { + async.map(filePaths, load.bind(this), function (err, templates) { + if (err) { return callback(err); } + + var map = filePaths.reduce(function (map, filePath, i) { + map[filePath] = templates[i]; + return map; + }, {}); + + callback(null, map); + }); + } + + async.waterfall([ + this._loadDir.bind(this, dirPath, options), + mapTemplates.bind(this) + ], callback); + }, + + render: function (filePath, options, callback) { + if (arguments.length < 3 && typeof options === 'function') { + callback = options; + options = {}; + } + + options || (options = {}); + + var helpers = extend({}, + this.handlebars.helpers, this.helpers, options.helpers); + + function loadTemplates(callback) { + async.parallel({ + partials: this.loadPartials.bind(this, options), + template: this.loadTemplate.bind(this, filePath, options) + }, callback); + } + + function renderTemplate(templates, callback) { + this._renderTemplate(templates.template, options, { + helpers : helpers, + partials: templates.partials + }, callback); + } + + // Force `{precompiled: false}` option, before passing `options` along + // to `getPartials()` and `getTemplate()` methods. + if (options.precompiled) { + options = extend({}, options, {precompiled: false}); + } + + async.waterfall([ + loadTemplates.bind(this), + renderTemplate.bind(this) + ], callback); + }, + + renderView: function (viewPath, options, callback) { + if (arguments.length < 3 && typeof options === 'function') { + callback = options; + options = {}; + } + + options || (options = {}); + + var layoutPath = this._resolveLayoutPath(options); + + function renderLayout(body, callback) { + var context = extend({}, options, {body: body}); + this.render(layoutPath, context, callback); + } + + // Simple render when no layout is used. + if (!layoutPath) { + this.render.apply(this, arguments); + return; + } + + // Force `{precompiled: false}` option, before passing options along to + // `getPartials()` and `getTemplate()` methods. + if (options.precompiled) { + options = extend({}, options, {precompiled: false}); + } + + async.waterfall([ + this.render.bind(this, viewPath, options), + renderLayout.bind(this) + ], callback); + }, + + // -- Private Methods ------------------------------------------------------ + + _getPartialName: function (filePath) { + var extRegex = new RegExp(this.extname + '$'), + name = filePath.replace(extRegex, ''), + version = this.handlebarsVersion; + + // Fixes a Handlebars bug in versions prior to 1.0.rc.2 which caused + // partials with "/"s in their name to not be found. 
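+        // (e.g. the partial for "foo/bar.handlebars" is registered as "foo.bar"
+        // on those versions, while templates keep referencing it as `{{> foo/bar}}`.)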
+ // https://github.com/wycats/handlebars.js/pull/389 + if (version && !semver.satisfies(version, '>=1.0.0-rc.2')) { + name = name.replace('/', '.'); + } + + return name; + }, + + _loadDir: function (dirPath, options, callback) { + dirPath = path.resolve(dirPath); + + var dirCache = ExpressHandlebars._dirCache, + pendingReads = ExpressHandlebars._pendingReads, + dir = options.cache && dirCache[dirPath], + callbacks, pattern; + + if (dir) { + callback(null, dir.concat()); + return; + } + + callbacks = pendingReads[dirPath]; + + if (callbacks) { + callbacks.push(callback); + return; + } + + callbacks = pendingReads[dirPath] = [callback]; + pattern = '**/*' + this.extname; + + glob(pattern, {cwd: dirPath}, function (err, dir) { + if (!err) { + dirCache[dirPath] = dir; + } + + while (callbacks.length) { + callbacks.shift().call(null, err, dir && dir.concat()); + } + + delete pendingReads[dirPath]; + }); + }, + + _loadFile: function (filePath, options, callback) { + filePath = path.resolve(filePath); + + var fileCache = ExpressHandlebars._fileCache, + pendingReads = ExpressHandlebars._pendingReads, + file = options.cache && fileCache[filePath], + callbacks; + + if (file) { + callback(null, file); + return; + } + + callbacks = pendingReads[filePath]; + + if (callbacks) { + callbacks.push(callback); + return; + } + + callbacks = pendingReads[filePath] = [callback]; + + fs.readFile(filePath, 'utf8', function (err, file) { + if (!err) { + fileCache[filePath] = file; + } + + while (callbacks.length) { + callbacks.shift().call(null, err, file); + } + + delete pendingReads[filePath]; + }); + }, + + _renderTemplate: function (template, context, options, callback) { + var output; + + try { + output = template(context, options); + callback(null, output); + } catch (ex) { + callback(ex); + } + }, + + _resolveLayoutPath: function (options) { + // Makes sure to interpret falsy `options.layout` values as no layout. + var layoutPath = 'layout' in options ? options.layout : + this.defaultLayout; + + if (!layoutPath) { + return null; + } + + if (!path.extname(layoutPath)) { + layoutPath += this.extname; + } + + if (layoutPath[0] !== '/') { + layoutPath = path.join(this.layoutsDir, layoutPath); + } + + return layoutPath; + } +}); + +// -- Exports ------------------------------------------------------------------ + +module.exports = ExpressHandlebars; diff --git a/node_modules/express3-handlebars/node_modules/async/package.json b/node_modules/express3-handlebars/node_modules/async/package.json deleted file mode 100644 index 246ceca07..000000000 --- a/node_modules/express3-handlebars/node_modules/async/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "async", - "description": "Higher-order functions and common patterns for asynchronous code", - "main": "./lib/async", - "author": { - "name": "Caolan McMahon" - }, - "version": "0.2.10", - "repository": { - "type": "git", - "url": "https://github.com/caolan/async.git" - }, - "bugs": { - "url": "https://github.com/caolan/async/issues" - }, - "licenses": [ - { - "type": "MIT", - "url": "https://github.com/caolan/async/raw/master/LICENSE" - } - ], - "devDependencies": { - "nodeunit": ">0.0.0", - "uglify-js": "1.2.x", - "nodelint": ">0.0.0" - }, - "jam": { - "main": "lib/async.js", - "include": [ - "lib/async.js", - "README.md", - "LICENSE" - ] - }, - "scripts": { - "test": "nodeunit test/test-async.js" - }, - "readme": "# Async.js\n\nAsync is a utility module which provides straight-forward, powerful functions\nfor working with asynchronous JavaScript. 
Although originally designed for\nuse with [node.js](http://nodejs.org), it can also be used directly in the\nbrowser. Also supports [component](https://github.com/component/component).\n\nAsync provides around 20 functions that include the usual 'functional'\nsuspects (map, reduce, filter, each…) as well as some common patterns\nfor asynchronous control flow (parallel, series, waterfall…). All these\nfunctions assume you follow the node.js convention of providing a single\ncallback as the last argument of your async function.\n\n\n## Quick Examples\n\n```javascript\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n\nasync.parallel([\n function(){ ... },\n function(){ ... }\n], callback);\n\nasync.series([\n function(){ ... },\n function(){ ... }\n]);\n```\n\nThere are many more functions available so take a look at the docs below for a\nfull list. This module aims to be comprehensive, so if you feel anything is\nmissing please create a GitHub issue for it.\n\n## Common Pitfalls\n\n### Binding a context to an iterator\n\nThis section is really about bind, not about async. If you are wondering how to\nmake async execute your iterators in a given context, or are confused as to why\na method of another library isn't working as an iterator, study this example:\n\n```js\n// Here is a simple object with an (unnecessarily roundabout) squaring method\nvar AsyncSquaringLibrary = {\n squareExponent: 2,\n square: function(number, callback){ \n var result = Math.pow(number, this.squareExponent);\n setTimeout(function(){\n callback(null, result);\n }, 200);\n }\n};\n\nasync.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){\n // result is [NaN, NaN, NaN]\n // This fails because the `this.squareExponent` expression in the square\n // function is not evaluated in the context of AsyncSquaringLibrary, and is\n // therefore undefined.\n});\n\nasync.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){\n // result is [1, 4, 9]\n // With the help of bind we can attach a context to the iterator before\n // passing it to async. Now the square function will be executed in its \n // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent`\n // will be as expected.\n});\n```\n\n## Download\n\nThe source is available for download from\n[GitHub](http://github.com/caolan/async).\nAlternatively, you can install using Node Package Manager (npm):\n\n npm install async\n\n__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed\n\n## In the Browser\n\nSo far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. 
Usage:\n\n```html\n\n\n```\n\n## Documentation\n\n### Collections\n\n* [each](#each)\n* [eachSeries](#eachSeries)\n* [eachLimit](#eachLimit)\n* [map](#map)\n* [mapSeries](#mapSeries)\n* [mapLimit](#mapLimit)\n* [filter](#filter)\n* [filterSeries](#filterSeries)\n* [reject](#reject)\n* [rejectSeries](#rejectSeries)\n* [reduce](#reduce)\n* [reduceRight](#reduceRight)\n* [detect](#detect)\n* [detectSeries](#detectSeries)\n* [sortBy](#sortBy)\n* [some](#some)\n* [every](#every)\n* [concat](#concat)\n* [concatSeries](#concatSeries)\n\n### Control Flow\n\n* [series](#series)\n* [parallel](#parallel)\n* [parallelLimit](#parallellimittasks-limit-callback)\n* [whilst](#whilst)\n* [doWhilst](#doWhilst)\n* [until](#until)\n* [doUntil](#doUntil)\n* [forever](#forever)\n* [waterfall](#waterfall)\n* [compose](#compose)\n* [applyEach](#applyEach)\n* [applyEachSeries](#applyEachSeries)\n* [queue](#queue)\n* [cargo](#cargo)\n* [auto](#auto)\n* [iterator](#iterator)\n* [apply](#apply)\n* [nextTick](#nextTick)\n* [times](#times)\n* [timesSeries](#timesSeries)\n\n### Utils\n\n* [memoize](#memoize)\n* [unmemoize](#unmemoize)\n* [log](#log)\n* [dir](#dir)\n* [noConflict](#noConflict)\n\n\n## Collections\n\n\n\n### each(arr, iterator, callback)\n\nApplies an iterator function to each item in an array, in parallel.\nThe iterator is called with an item from the list and a callback for when it\nhas finished. If the iterator passes an error to this callback, the main\ncallback for the each function is immediately called with the error.\n\nNote, that since this function applies the iterator to each item in parallel\nthere is no guarantee that the iterator functions will complete in order.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err) which must be called once it has \n completed. If no error has occured, the callback should be run without \n arguments or with an explicit null argument.\n* callback(err) - A callback which is called after all the iterator functions\n have finished, or an error has occurred.\n\n__Example__\n\n```js\n// assuming openFiles is an array of file names and saveFile is a function\n// to save the modified contents of that file:\n\nasync.each(openFiles, saveFile, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n\n\n### eachSeries(arr, iterator, callback)\n\nThe same as each only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. This means the iterator functions will complete in order.\n\n\n---------------------------------------\n\n\n\n### eachLimit(arr, limit, iterator, callback)\n\nThe same as each only no more than \"limit\" iterators will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that\n the first \"limit\" iterator functions will complete before any others are \nstarted.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* limit - The maximum number of iterators to run at any time.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err) which must be called once it has \n completed. 
If no error has occured, the callback should be run without \n arguments or with an explicit null argument.\n* callback(err) - A callback which is called after all the iterator functions\n have finished, or an error has occurred.\n\n__Example__\n\n```js\n// Assume documents is an array of JSON objects and requestApi is a\n// function that interacts with a rate-limited REST api.\n\nasync.eachLimit(documents, 20, requestApi, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n\n### map(arr, iterator, callback)\n\nProduces a new array of values by mapping each value in the given array through\nthe iterator function. The iterator is called with an item from the array and a\ncallback for when it has finished processing. The callback takes 2 arguments, \nan error and the transformed item from the array. If the iterator passes an\nerror to this callback, the main callback for the map function is immediately\ncalled with the error.\n\nNote, that since this function applies the iterator to each item in parallel\nthere is no guarantee that the iterator functions will complete in order, however\nthe results array will be in the same order as the original array.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, transformed) which must be called once \n it has completed with an error (which can be null) and a transformed item.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is an array of the\n transformed items from the original array.\n\n__Example__\n\n```js\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n\n### mapSeries(arr, iterator, callback)\n\nThe same as map only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n\n---------------------------------------\n\n\n### mapLimit(arr, limit, iterator, callback)\n\nThe same as map only no more than \"limit\" iterators will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that\n the first \"limit\" iterator functions will complete before any others are \nstarted.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* limit - The maximum number of iterators to run at any time.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, transformed) which must be called once \n it has completed with an error (which can be null) and a transformed item.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. 
Results is an array of the\n transformed items from the original array.\n\n__Example__\n\n```js\nasync.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n\n### filter(arr, iterator, callback)\n\n__Alias:__ select\n\nReturns a new array of all the values which pass an async truth test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists. This operation is\nperformed in parallel, but the results array will be in the same order as the\noriginal.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(results) - A callback which is called after all the iterator\n functions have finished.\n\n__Example__\n\n```js\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n```\n\n---------------------------------------\n\n\n### filterSeries(arr, iterator, callback)\n\n__alias:__ selectSeries\n\nThe same as filter only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n---------------------------------------\n\n\n### reject(arr, iterator, callback)\n\nThe opposite of filter. Removes values that pass an async truth test.\n\n---------------------------------------\n\n\n### rejectSeries(arr, iterator, callback)\n\nThe same as reject, only the iterator is applied to each item in the array\nin series.\n\n\n---------------------------------------\n\n\n### reduce(arr, memo, iterator, callback)\n\n__aliases:__ inject, foldl\n\nReduces a list of values into a single value using an async iterator to return\neach successive step. Memo is the initial state of the reduction. This\nfunction only operates in series. For performance reasons, it may make sense to\nsplit a call to this function into a parallel map, then use the normal\nArray.prototype.reduce on the results. This function is for situations where\neach step in the reduction needs to be async, if you can get the data before\nreducing it then it's probably a good idea to do so.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* memo - The initial state of the reduction.\n* iterator(memo, item, callback) - A function applied to each item in the\n array to produce the next step in the reduction. The iterator is passed a\n callback(err, reduction) which accepts an optional error as its first \n argument, and the state of the reduction as the second. If an error is \n passed to the callback, the reduction is stopped and the main callback is \n immediately called with the error.\n* callback(err, result) - A callback which is called after all the iterator\n functions have finished. 
Result is the reduced value.\n\n__Example__\n\n```js\nasync.reduce([1,2,3], 0, function(memo, item, callback){\n // pointless async:\n process.nextTick(function(){\n callback(null, memo + item)\n });\n}, function(err, result){\n // result is now equal to the last value of memo, which is 6\n});\n```\n\n---------------------------------------\n\n\n### reduceRight(arr, memo, iterator, callback)\n\n__Alias:__ foldr\n\nSame as reduce, only operates on the items in the array in reverse order.\n\n\n---------------------------------------\n\n\n### detect(arr, iterator, callback)\n\nReturns the first value in a list that passes an async truth test. The\niterator is applied in parallel, meaning the first iterator to return true will\nfire the detect callback with that result. That means the result might not be\nthe first item in the original array (in terms of order) that passes the test.\n\nIf order within the original array is important then look at detectSeries.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called as soon as any iterator returns\n true, or after all the iterator functions have finished. Result will be\n the first item in the array that passes the truth test (iterator) or the\n value undefined if none passed.\n\n__Example__\n\n```js\nasync.detect(['file1','file2','file3'], fs.exists, function(result){\n // result now equals the first file in the list that exists\n});\n```\n\n---------------------------------------\n\n\n### detectSeries(arr, iterator, callback)\n\nThe same as detect, only the iterator is applied to each item in the array\nin series. This means the result is always the first in the original array (in\nterms of array order) that passes the truth test.\n\n\n---------------------------------------\n\n\n### sortBy(arr, iterator, callback)\n\nSorts a list by the results of running each value through an async iterator.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, sortValue) which must be called once it\n has completed with an error (which can be null) and a value to use as the sort\n criteria.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is the items from\n the original array sorted by the values returned by the iterator calls.\n\n__Example__\n\n```js\nasync.sortBy(['file1','file2','file3'], function(file, callback){\n fs.stat(file, function(err, stats){\n callback(err, stats.mtime);\n });\n}, function(err, results){\n // results is now the original array of files sorted by\n // modified date\n});\n```\n\n---------------------------------------\n\n\n### some(arr, iterator, callback)\n\n__Alias:__ any\n\nReturns true if at least one element in the array satisfies an async test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists. 
Once any iterator\ncall returns true, the main callback is immediately called.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called as soon as any iterator returns\n true, or after all the iterator functions have finished. Result will be\n either true or false depending on the values of the async tests.\n\n__Example__\n\n```js\nasync.some(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then at least one of the files exists\n});\n```\n\n---------------------------------------\n\n\n### every(arr, iterator, callback)\n\n__Alias:__ all\n\nReturns true if every element in the array satisfies an async test.\n_The callback for each iterator call only accepts a single argument of true or\nfalse, it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like fs.exists.\n\n__Arguments__\n\n* arr - An array to iterate over.\n* iterator(item, callback) - A truth test to apply to each item in the array.\n The iterator is passed a callback(truthValue) which must be called with a \n boolean argument once it has completed.\n* callback(result) - A callback which is called after all the iterator\n functions have finished. Result will be either true or false depending on\n the values of the async tests.\n\n__Example__\n\n```js\nasync.every(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then every file exists\n});\n```\n\n---------------------------------------\n\n\n### concat(arr, iterator, callback)\n\nApplies an iterator to each item in a list, concatenating the results. Returns the\nconcatenated list. The iterators are called in parallel, and the results are\nconcatenated as they return. There is no guarantee that the results array will\nbe returned in the original order of the arguments passed to the iterator function.\n\n__Arguments__\n\n* arr - An array to iterate over\n* iterator(item, callback) - A function to apply to each item in the array.\n The iterator is passed a callback(err, results) which must be called once it \n has completed with an error (which can be null) and an array of results.\n* callback(err, results) - A callback which is called after all the iterator\n functions have finished, or an error has occurred. Results is an array containing\n the concatenated results of the iterator function.\n\n__Example__\n\n```js\nasync.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){\n // files is now a list of filenames that exist in the 3 directories\n});\n```\n\n---------------------------------------\n\n\n### concatSeries(arr, iterator, callback)\n\nSame as async.concat, but executes in series instead of parallel.\n\n\n## Control Flow\n\n\n### series(tasks, [callback])\n\nRun an array of functions in series, each one running once the previous\nfunction has completed. If any functions in the series pass an error to its\ncallback, no more functions are run and the callback for the series is\nimmediately called with the value of the error. Once the tasks have completed,\nthe results are passed to the final callback as an array.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final callback as an object\ninstead of an array. 
This can be a more readable way of handling results from\nasync.series.\n\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed\n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.series([\n function(callback){\n // do some stuff ...\n callback(null, 'one');\n },\n function(callback){\n // do some more stuff ...\n callback(null, 'two');\n }\n],\n// optional callback\nfunction(err, results){\n // results is now equal to ['one', 'two']\n});\n\n\n// an example using an object instead of an array\nasync.series({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equal to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n\n### parallel(tasks, [callback])\n\nRun an array of functions in parallel, without waiting until the previous\nfunction has completed. If any of the functions pass an error to its\ncallback, the main callback is immediately called with the value of the error.\nOnce the tasks have completed, the results are passed to the final callback as an\narray.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final callback as an object\ninstead of an array. This can be a more readable way of handling results from\nasync.parallel.\n\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed \n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. 
This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.parallel([\n function(callback){\n setTimeout(function(){\n callback(null, 'one');\n }, 200);\n },\n function(callback){\n setTimeout(function(){\n callback(null, 'two');\n }, 100);\n }\n],\n// optional callback\nfunction(err, results){\n // the results array will equal ['one','two'] even though\n // the second function had a shorter timeout.\n});\n\n\n// an example using an object instead of an array\nasync.parallel({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equals to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n\n### parallelLimit(tasks, limit, [callback])\n\nThe same as parallel only the tasks are executed in parallel with a maximum of \"limit\" \ntasks executing at any time.\n\nNote that the tasks are not executed in batches, so there is no guarantee that \nthe first \"limit\" tasks will complete before any others are started.\n\n__Arguments__\n\n* tasks - An array or object containing functions to run, each function is passed \n a callback(err, result) it must call on completion with an error (which can\n be null) and an optional result value.\n* limit - The maximum number of tasks to run at any time.\n* callback(err, results) - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n---------------------------------------\n\n\n### whilst(test, fn, callback)\n\nRepeatedly call fn, while test returns true. Calls the callback when stopped,\nor an error occurs.\n\n__Arguments__\n\n* test() - synchronous truth test to perform before each execution of fn.\n* fn(callback) - A function to call each time the test passes. The function is\n passed a callback(err) which must be called once it has completed with an \n optional error argument.\n* callback(err) - A callback which is called after the test fails and repeated\n execution of fn has stopped.\n\n__Example__\n\n```js\nvar count = 0;\n\nasync.whilst(\n function () { return count < 5; },\n function (callback) {\n count++;\n setTimeout(callback, 1000);\n },\n function (err) {\n // 5 seconds have passed\n }\n);\n```\n\n---------------------------------------\n\n\n### doWhilst(fn, test, callback)\n\nThe post check version of whilst. To reflect the difference in the order of operations `test` and `fn` arguments are switched. `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.\n\n---------------------------------------\n\n\n### until(test, fn, callback)\n\nRepeatedly call fn, until test returns true. Calls the callback when stopped,\nor an error occurs.\n\nThe inverse of async.whilst.\n\n---------------------------------------\n\n\n### doUntil(fn, test, callback)\n\nLike doWhilst except the test is inverted. 
Note the argument ordering differs from `until`.\n\n---------------------------------------\n\n\n### forever(fn, callback)\n\nCalls the asynchronous function 'fn' repeatedly, in series, indefinitely.\nIf an error is passed to fn's callback then 'callback' is called with the\nerror, otherwise it will never be called.\n\n---------------------------------------\n\n\n### waterfall(tasks, [callback])\n\nRuns an array of functions in series, each passing their results to the next in\nthe array. However, if any of the functions pass an error to the callback, the\nnext function is not executed and the main callback is immediately called with\nthe error.\n\n__Arguments__\n\n* tasks - An array of functions to run, each function is passed a \n callback(err, result1, result2, ...) it must call on completion. The first\n argument is an error (which can be null) and any further arguments will be \n passed as arguments in order to the next task.\n* callback(err, [results]) - An optional callback to run once all the functions\n have completed. This will be passed the results of the last task's callback.\n\n\n\n__Example__\n\n```js\nasync.waterfall([\n function(callback){\n callback(null, 'one', 'two');\n },\n function(arg1, arg2, callback){\n callback(null, 'three');\n },\n function(arg1, callback){\n // arg1 now equals 'three'\n callback(null, 'done');\n }\n], function (err, result) {\n // result now equals 'done' \n});\n```\n\n---------------------------------------\n\n### compose(fn1, fn2...)\n\nCreates a function which is a composition of the passed asynchronous\nfunctions. Each function consumes the return value of the function that\nfollows. Composing functions f(), g() and h() would produce the result of\nf(g(h())), only this version uses callbacks to obtain the return values.\n\nEach function is executed with the `this` binding of the composed function.\n\n__Arguments__\n\n* functions... - the asynchronous functions to compose\n\n\n__Example__\n\n```js\nfunction add1(n, callback) {\n setTimeout(function () {\n callback(null, n + 1);\n }, 10);\n}\n\nfunction mul3(n, callback) {\n setTimeout(function () {\n callback(null, n * 3);\n }, 10);\n}\n\nvar add1mul3 = async.compose(mul3, add1);\n\nadd1mul3(4, function (err, result) {\n // result now equals 15\n});\n```\n\n---------------------------------------\n\n### applyEach(fns, args..., callback)\n\nApplies the provided arguments to each function in the array, calling the\ncallback after all functions have completed. If you only provide the first\nargument then it will return a function which lets you pass in the\narguments as if it were a single function call.\n\n__Arguments__\n\n* fns - the asynchronous functions to all call with the same arguments\n* args... - any number of separate arguments to pass to the function\n* callback - the final argument should be the callback, called when all\n functions have completed processing\n\n\n__Example__\n\n```js\nasync.applyEach([enableSearch, updateSchema], 'bucket', callback);\n\n// partial application example:\nasync.each(\n buckets,\n async.applyEach([enableSearch, updateSchema]),\n callback\n);\n```\n\n---------------------------------------\n\n\n### applyEachSeries(arr, iterator, callback)\n\nThe same as applyEach only the functions are applied in series.\n\n---------------------------------------\n\n\n### queue(worker, concurrency)\n\nCreates a queue object with the specified concurrency. Tasks added to the\nqueue will be processed in parallel (up to the concurrency limit). 
If all\nworkers are in progress, the task is queued until one is available. Once\na worker has completed a task, the task's callback is called.\n\n__Arguments__\n\n* worker(task, callback) - An asynchronous function for processing a queued\n task, which must call its callback(err) argument when finished, with an \n optional error as an argument.\n* concurrency - An integer for determining how many worker functions should be\n run in parallel.\n\n__Queue objects__\n\nThe queue object returned by this function has the following properties and\nmethods:\n\n* length() - a function returning the number of items waiting to be processed.\n* concurrency - an integer for determining how many worker functions should be\n run in parallel. This property can be changed after a queue is created to\n alter the concurrency on-the-fly.\n* push(task, [callback]) - add a new task to the queue, the callback is called\n once the worker has finished processing the task.\n instead of a single task, an array of tasks can be submitted. the respective callback is used for every task in the list.\n* unshift(task, [callback]) - add a new task to the front of the queue.\n* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued\n* empty - a callback that is called when the last item from the queue is given to a worker\n* drain - a callback that is called when the last item from the queue has returned from the worker\n\n__Example__\n\n```js\n// create a queue object with concurrency 2\n\nvar q = async.queue(function (task, callback) {\n console.log('hello ' + task.name);\n callback();\n}, 2);\n\n\n// assign a callback\nq.drain = function() {\n console.log('all items have been processed');\n}\n\n// add some items to the queue\n\nq.push({name: 'foo'}, function (err) {\n console.log('finished processing foo');\n});\nq.push({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the queue (batch-wise)\n\nq.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the front of the queue\n\nq.unshift({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n```\n\n---------------------------------------\n\n\n### cargo(worker, [payload])\n\nCreates a cargo object with the specified payload. Tasks added to the\ncargo will be processed altogether (up to the payload limit). If the\nworker is in progress, the task is queued until it is available. Once\nthe worker has completed some tasks, each callback of those tasks is called.\n\n__Arguments__\n\n* worker(tasks, callback) - An asynchronous function for processing an array of\n queued tasks, which must call its callback(err) argument when finished, with \n an optional error as an argument.\n* payload - An optional integer for determining how many tasks should be\n processed per round; if omitted, the default is unlimited.\n\n__Cargo objects__\n\nThe cargo object returned by this function has the following properties and\nmethods:\n\n* length() - a function returning the number of items waiting to be processed.\n* payload - an integer for determining how many tasks should be\n process per round. This property can be changed after a cargo is created to\n alter the payload on-the-fly.\n* push(task, [callback]) - add a new task to the queue, the callback is called\n once the worker has finished processing the task.\n instead of a single task, an array of tasks can be submitted. 
the respective callback is used for every task in the list.\n* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued\n* empty - a callback that is called when the last item from the queue is given to a worker\n* drain - a callback that is called when the last item from the queue has returned from the worker\n\n__Example__\n\n```js\n// create a cargo object with payload 2\n\nvar cargo = async.cargo(function (tasks, callback) {\n for(var i=0; i\n### auto(tasks, [callback])\n\nDetermines the best order for running functions based on their requirements.\nEach function can optionally depend on other functions being completed first,\nand each function is run as soon as its requirements are satisfied. If any of\nthe functions pass an error to their callback, that function will not complete\n(so any other functions depending on it will not run) and the main callback\nwill be called immediately with the error. Functions also receive an object\ncontaining the results of functions which have completed so far.\n\nNote, all functions are called with a results object as a second argument, \nso it is unsafe to pass functions in the tasks object which cannot handle the\nextra argument. For example, this snippet of code:\n\n```js\nasync.auto({\n readData: async.apply(fs.readFile, 'data.txt', 'utf-8')\n}, callback);\n```\n\nwill have the effect of calling readFile with the results object as the last\nargument, which will fail:\n\n```js\nfs.readFile('data.txt', 'utf-8', cb, {});\n```\n\nInstead, wrap the call to readFile in a function which does not forward the \nresults object:\n\n```js\nasync.auto({\n readData: function(cb, results){\n fs.readFile('data.txt', 'utf-8', cb);\n }\n}, callback);\n```\n\n__Arguments__\n\n* tasks - An object literal containing named functions or an array of\n requirements, with the function itself the last item in the array. The key\n used for each function or array is used when specifying requirements. The \n function receives two arguments: (1) a callback(err, result) which must be \n called when finished, passing an error (which can be null) and the result of \n the function's execution, and (2) a results object, containing the results of\n the previously executed functions.\n* callback(err, results) - An optional callback which is called when all the\n tasks have been completed. The callback will receive an error as an argument\n if any tasks pass an error to their callback. 
Results will always be passed\n\tbut if an error occurred, no other tasks will be performed, and the results\n\tobject will only contain partial results.\n \n\n__Example__\n\n```js\nasync.auto({\n get_data: function(callback){\n // async code to get some data\n },\n make_folder: function(callback){\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n },\n write_file: ['get_data', 'make_folder', function(callback){\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n callback(null, filename);\n }],\n email_link: ['write_file', function(callback, results){\n // once the file is written let's email a link to it...\n // results.write_file contains the filename returned by write_file.\n }]\n});\n```\n\nThis is a fairly trivial example, but to do this using the basic parallel and\nseries functions would look like this:\n\n```js\nasync.parallel([\n function(callback){\n // async code to get some data\n },\n function(callback){\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n }\n],\nfunction(err, results){\n async.series([\n function(callback){\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n },\n function(callback){\n // once the file is written let's email a link to it...\n }\n ]);\n});\n```\n\nFor a complicated series of async tasks using the auto function makes adding\nnew tasks much easier and makes the code more readable.\n\n\n---------------------------------------\n\n\n### iterator(tasks)\n\nCreates an iterator function which calls the next function in the array,\nreturning a continuation to call the next one after that. It's also possible to\n'peek' the next iterator by doing iterator.next().\n\nThis function is used internally by the async module but can be useful when\nyou want to manually control the flow of functions in series.\n\n__Arguments__\n\n* tasks - An array of functions to run.\n\n__Example__\n\n```js\nvar iterator = async.iterator([\n function(){ sys.p('one'); },\n function(){ sys.p('two'); },\n function(){ sys.p('three'); }\n]);\n\nnode> var iterator2 = iterator();\n'one'\nnode> var iterator3 = iterator2();\n'two'\nnode> iterator3();\n'three'\nnode> var nextfn = iterator2.next();\nnode> nextfn();\n'three'\n```\n\n---------------------------------------\n\n\n### apply(function, arguments..)\n\nCreates a continuation function with some arguments already applied, a useful\nshorthand when combined with other control flow functions. Any arguments\npassed to the returned function are added to the arguments originally passed\nto apply.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... 
- Any number of arguments to automatically apply when the\n continuation is called.\n\n__Example__\n\n```js\n// using apply\n\nasync.parallel([\n async.apply(fs.writeFile, 'testfile1', 'test1'),\n async.apply(fs.writeFile, 'testfile2', 'test2'),\n]);\n\n\n// the same process without using apply\n\nasync.parallel([\n function(callback){\n fs.writeFile('testfile1', 'test1', callback);\n },\n function(callback){\n fs.writeFile('testfile2', 'test2', callback);\n }\n]);\n```\n\nIt's possible to pass any number of additional arguments when calling the\ncontinuation:\n\n```js\nnode> var fn = async.apply(sys.puts, 'one');\nnode> fn('two', 'three');\none\ntwo\nthree\n```\n\n---------------------------------------\n\n\n### nextTick(callback)\n\nCalls the callback on a later loop around the event loop. In node.js this just\ncalls process.nextTick, in the browser it falls back to setImmediate(callback)\nif available, otherwise setTimeout(callback, 0), which means other higher priority\nevents may precede the execution of the callback.\n\nThis is used internally for browser-compatibility purposes.\n\n__Arguments__\n\n* callback - The function to call on a later loop around the event loop.\n\n__Example__\n\n```js\nvar call_order = [];\nasync.nextTick(function(){\n call_order.push('two');\n // call_order now equals ['one','two']\n});\ncall_order.push('one')\n```\n\n\n### times(n, callback)\n\nCalls the callback n times and accumulates results in the same manner\nyou would use with async.map.\n\n__Arguments__\n\n* n - The number of times to run the function.\n* callback - The function to call n times.\n\n__Example__\n\n```js\n// Pretend this is some complicated async factory\nvar createUser = function(id, callback) {\n callback(null, {\n id: 'user' + id\n })\n}\n// generate 5 users\nasync.times(5, function(n, next){\n createUser(n, function(err, user) {\n next(err, user)\n })\n}, function(err, users) {\n // we should now have 5 users\n});\n```\n\n\n### timesSeries(n, callback)\n\nThe same as times only the iterator is applied to each item in the array in\nseries. The next iterator is only called once the current one has completed\nprocessing. The results array will be in the same order as the original.\n\n\n## Utils\n\n\n### memoize(fn, [hasher])\n\nCaches the results of an async function. When creating a hash to store function\nresults against, the callback is omitted from the hash and an optional hash\nfunction can be used.\n\nThe cache of results is exposed as the `memo` property of the function returned\nby `memoize`.\n\n__Arguments__\n\n* fn - the function you to proxy and cache results from.\n* hasher - an optional function for generating a custom hash for storing\n results, it has all the arguments applied to it apart from the callback, and\n must be synchronous.\n\n__Example__\n\n```js\nvar slow_fn = function (name, callback) {\n // do something\n callback(null, result);\n};\nvar fn = async.memoize(slow_fn);\n\n// fn can now be used as if it were slow_fn\nfn('some name', function () {\n // callback\n});\n```\n\n\n### unmemoize(fn)\n\nUndoes a memoized function, reverting it to the original, unmemoized\nform. Comes handy in tests.\n\n__Arguments__\n\n* fn - the memoized function\n\n\n### log(function, arguments)\n\nLogs the result of an async function to the console. 
Only works in node.js or\nin browsers that support console.log and console.error (such as FF and Chrome).\nIf multiple arguments are returned from the async function, console.log is\ncalled on each argument in order.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, 'hello ' + name);\n }, 1000);\n};\n```\n```js\nnode> async.log(hello, 'world');\n'hello world'\n```\n\n---------------------------------------\n\n\n### dir(function, arguments)\n\nLogs the result of an async function to the console using console.dir to\ndisplay the properties of the resulting object. Only works in node.js or\nin browsers that support console.dir and console.error (such as FF and Chrome).\nIf multiple arguments are returned from the async function, console.dir is\ncalled on each argument in order.\n\n__Arguments__\n\n* function - The function you want to eventually apply all arguments to.\n* arguments... - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, {hello: name});\n }, 1000);\n};\n```\n```js\nnode> async.dir(hello, 'world');\n{hello: 'world'}\n```\n\n---------------------------------------\n\n\n### noConflict()\n\nChanges the value of async back to its original value, returning a reference to the\nasync object.\n", - "readmeFilename": "README.md", - "homepage": "https://github.com/caolan/async", - "_id": "async@0.2.10", - "dist": { - "shasum": "6fd106274306f9b5343b1502a0cb4377cfd72dac" - }, - "_from": "async@~0.2", - "_resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" -} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits.js b/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits.js deleted file mode 100644 index 29f5e24f5..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('util').inherits diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits_browser.js b/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits_browser.js deleted file mode 100644 index c1e78a75e..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/inherits_browser.js +++ /dev/null @@ -1,23 +0,0 @@ -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } -} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/package.json b/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/package.json deleted file mode 100644 index 5bf0db561..000000000 --- 
a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "inherits", - "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "version": "2.0.1", - "keywords": [ - "inheritance", - "class", - "klass", - "oop", - "object-oriented", - "inherits", - "browser", - "browserify" - ], - "main": "./inherits.js", - "browser": "./inherits_browser.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/inherits" - }, - "license": "ISC", - "scripts": { - "test": "node test" - }, - "readme": "Browser-friendly inheritance fully compatible with standard node.js\n[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).\n\nThis package exports standard `inherits` from node.js `util` module in\nnode environment, but also provides alternative browser-friendly\nimplementation through [browser\nfield](https://gist.github.com/shtylman/4339901). Alternative\nimplementation is a literal copy of standard one located in standalone\nmodule to avoid requiring of `util`. It also has a shim for old\nbrowsers with no `Object.create` support.\n\nWhile keeping you sure you are using standard `inherits`\nimplementation in node.js environment, it allows bundlers such as\n[browserify](https://github.com/substack/node-browserify) to not\ninclude full `util` package to your client code if all you need is\njust `inherits` function. It worth, because browser shim for `util`\npackage is large and `inherits` is often the single function you need\nfrom it.\n\nIt's recommended to use this package instead of\n`require('util').inherits` for any code that has chances to be used\nnot only in node.js but in browser too.\n\n## usage\n\n```js\nvar inherits = require('inherits');\n// then use exactly as the standard one\n```\n\n## note on version ~1.0\n\nVersion ~1.0 had completely different motivation and is not compatible\nneither with 2.0 nor with standard node.js `inherits`.\n\nIf you are using version ~1.0 and planning to switch to ~2.0, be\ncareful:\n\n* new version uses `super_` instead of `super` for referencing\n superclass\n* new version overwrites current prototype while old one preserves any\n existing fields on it\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/inherits/issues" - }, - "homepage": "https://github.com/isaacs/inherits", - "_id": "inherits@2.0.1", - "_from": "inherits@2" -} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/test.js b/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/test.js deleted file mode 100644 index fc53012d3..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/test.js +++ /dev/null @@ -1,25 +0,0 @@ -var inherits = require('./inherits.js') -var assert = require('assert') - -function test(c) { - assert(c.constructor === Child) - assert(c.constructor.super_ === Parent) - assert(Object.getPrototypeOf(c) === Child.prototype) - assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype) - assert(c instanceof Child) - assert(c instanceof Parent) -} - -function Child() { - Parent.call(this) - test(this) -} - -function Parent() {} - -inherits(Child, Parent) - -var c = new Child -test(c) - -console.log('ok') diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE 
b/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE deleted file mode 100644 index 05a401094..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ -Copyright 2009, 2010, 2011 Isaac Z. Schlueter. -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json b/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json deleted file mode 100644 index 4472725df..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "2.5.0", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me" - }, - "scripts": { - "test": "tap test --gc" - }, - "main": "lib/lru-cache.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-lru-cache.git" - }, - "devDependencies": { - "tap": "", - "weak": "" - }, - "license": { - "type": "MIT", - "url": "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE" - }, - "readme": "# lru cache\n\nA cache object that deletes the least-recently-used items.\n\n## Usage:\n\n```javascript\nvar LRU = require(\"lru-cache\")\n , options = { max: 500\n , length: function (n) { return n * 2 }\n , dispose: function (key, n) { n.close() }\n , maxAge: 1000 * 60 * 60 }\n , cache = LRU(options)\n , otherCache = LRU(50) // sets just the max size\n\ncache.set(\"key\", \"value\")\ncache.get(\"key\") // \"value\"\n\ncache.reset() // empty the cache\n```\n\nIf you put more stuff in it, then items will fall out.\n\nIf you try to put an oversized thing in it, then it'll fall out right\naway.\n\n## Options\n\n* `max` The maximum size of the cache, checked by applying the length\n function to all values in the cache. Not setting this is kind of\n silly, since that's the whole purpose of this lib, but it defaults\n to `Infinity`.\n* `maxAge` Maximum age in ms. Items are not pro-actively pruned out\n as they age, but if you try to get an item that is too old, it'll\n drop it and return undefined instead of giving it to you.\n* `length` Function that is used to calculate the length of stored\n items. 
If you're storing strings or buffers, then you probably want\n to do something like `function(n){return n.length}`. The default is\n `function(n){return 1}`, which is fine if you want to store `n`\n like-sized things.\n* `dispose` Function that is called on items when they are dropped\n from the cache. This can be handy if you want to close file\n descriptors or do other cleanup tasks when items are no longer\n accessible. Called with `key, value`. It's called *before*\n actually removing the item from the internal cache, so if you want\n to immediately put it back in, you'll have to do that in a\n `nextTick` or `setTimeout` callback or it won't do anything.\n* `stale` By default, if you set a `maxAge`, it'll only actually pull\n stale items out of the cache when you `get(key)`. (That is, it's\n not pre-emptively doing a `setTimeout` or anything.) If you set\n `stale:true`, it'll return the stale value before deleting it. If\n you don't set this, then it'll return `undefined` when you try to\n get a stale entry, as if it had already been deleted.\n\n## API\n\n* `set(key, value)`\n* `get(key) => value`\n\n Both of these will update the \"recently used\"-ness of the key.\n They do what you think.\n\n* `peek(key)`\n\n Returns the key value (or `undefined` if not found) without\n updating the \"recently used\"-ness of the key.\n\n (If you find yourself using this a lot, you *might* be using the\n wrong sort of data structure, but there are some use cases where\n it's handy.)\n\n* `del(key)`\n\n Deletes a key out of the cache.\n\n* `reset()`\n\n Clear the cache entirely, throwing away all values.\n\n* `has(key)`\n\n Check if a key is in the cache, without updating the recent-ness\n or deleting it for being stale.\n\n* `forEach(function(value,key,cache), [thisp])`\n\n Just like `Array.prototype.forEach`. Iterates over all the keys\n in the cache, in order of recent-ness. 
(Ie, more recently used\n items are iterated over first.)\n\n* `keys()`\n\n Return an array of the keys in the cache.\n\n* `values()`\n\n Return an array of the values in the cache.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-lru-cache/issues" - }, - "homepage": "https://github.com/isaacs/node-lru-cache", - "_id": "lru-cache@2.5.0", - "_from": "lru-cache@2" -} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/foreach.js b/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/foreach.js deleted file mode 100644 index eefb80d9d..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/foreach.js +++ /dev/null @@ -1,52 +0,0 @@ -var test = require('tap').test -var LRU = require('../') - -test('forEach', function (t) { - var l = new LRU(5) - for (var i = 0; i < 10; i ++) { - l.set(i.toString(), i.toString(2)) - } - - var i = 9 - l.forEach(function (val, key, cache) { - t.equal(cache, l) - t.equal(key, i.toString()) - t.equal(val, i.toString(2)) - i -= 1 - }) - - // get in order of most recently used - l.get(6) - l.get(8) - - var order = [ 8, 6, 9, 7, 5 ] - var i = 0 - - l.forEach(function (val, key, cache) { - var j = order[i ++] - t.equal(cache, l) - t.equal(key, j.toString()) - t.equal(val, j.toString(2)) - }) - - t.end() -}) - -test('keys() and values()', function (t) { - var l = new LRU(5) - for (var i = 0; i < 10; i ++) { - l.set(i.toString(), i.toString(2)) - } - - t.similar(l.keys(), ['9', '8', '7', '6', '5']) - t.similar(l.values(), ['1001', '1000', '111', '110', '101']) - - // get in order of most recently used - l.get(6) - l.get(8) - - t.similar(l.keys(), ['8', '6', '9', '7', '5']) - t.similar(l.values(), ['1000', '110', '1001', '111', '101']) - - t.end() -}) diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json b/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json deleted file mode 100644 index cb7e2bd4b..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "sigmund", - "version": "1.0.0", - "description": "Quick and dirty signatures for Objects.", - "main": "sigmund.js", - "directories": { - "test": "test" - }, - "dependencies": {}, - "devDependencies": { - "tap": "~0.3.0" - }, - "scripts": { - "test": "tap test/*.js", - "bench": "node bench.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/isaacs/sigmund" - }, - "keywords": [ - "object", - "signature", - "key", - "data", - "psychoanalysis" - ], - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "license": "BSD", - "readme": "# sigmund\n\nQuick and dirty signatures for Objects.\n\nThis is like a much faster `deepEquals` comparison, which returns a\nstring key suitable for caches and the like.\n\n## Usage\n\n```javascript\nfunction doSomething (someObj) {\n var key = sigmund(someObj, maxDepth) // max depth defaults to 10\n var cached = cache.get(key)\n if (cached) return cached)\n\n var result = expensiveCalculation(someObj)\n cache.set(key, result)\n return result\n}\n```\n\nThe resulting key will be as unique and reproducible as calling\n`JSON.stringify` or `util.inspect` on the object, but is much faster.\nIn order to achieve this speed, some differences are glossed over.\nFor example, the object `{0:'foo'}` will be treated identically to the\narray `['foo']`.\n\nAlso, just as there is no way to summon the soul from the scribblings\nof a cocain-addled psychoanalyst, there is no way to revive the object\nfrom the signature string that sigmund gives you. In fact, it's\nbarely even readable.\n\nAs with `sys.inspect` and `JSON.stringify`, larger objects will\nproduce larger signature strings.\n\nBecause sigmund is a bit less strict than the more thorough\nalternatives, the strings will be shorter, and also there is a\nslightly higher chance for collisions. For example, these objects\nhave the same signature:\n\n var obj1 = {a:'b',c:/def/,g:['h','i',{j:'',k:'l'}]}\n var obj2 = {a:'b',c:'/def/',g:['h','i','{jkl']}\n\nLike a good Freudian, sigmund is most effective when you already have\nsome understanding of what you're looking for. It can help you help\nyourself, but you must be willing to do some work as well.\n\nCycles are handled, and cyclical objects are silently omitted (though\nthe key is included in the signature output.)\n\nThe second argument is the maximum depth, which defaults to 10,\nbecause that is the maximum object traversal depth covered by most\ninsurance carriers.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/sigmund/issues" - }, - "homepage": "https://github.com/isaacs/sigmund", - "_id": "sigmund@1.0.0", - "_from": "sigmund@~1.0.0" -} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/package.json b/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/package.json deleted file mode 100644 index f8f545aa3..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me" - }, - "name": "minimatch", - "description": "a glob matcher in javascript", - "version": "0.2.14", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/minimatch.git" - }, - "main": "minimatch.js", - "scripts": { - "test": "tap test/*.js" - }, - "engines": { - "node": "*" - }, - "dependencies": { - "lru-cache": "2", - "sigmund": "~1.0.0" - }, - "devDependencies": { - "tap": "" - }, - "license": { - "type": "MIT", - "url": "http://github.com/isaacs/minimatch/raw/master/LICENSE" - }, - "readme": "# minimatch\n\nA minimal matching utility.\n\n[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch)\n\n\nThis is the matching library used internally by npm.\n\nEventually, it will replace the C binding in node-glob.\n\nIt works by converting glob expressions into JavaScript `RegExp`\nobjects.\n\n## Usage\n\n```javascript\nvar minimatch = require(\"minimatch\")\n\nminimatch(\"bar.foo\", \"*.foo\") // true!\nminimatch(\"bar.foo\", \"*.bar\") // false!\nminimatch(\"bar.foo\", \"*.+(bar|foo)\", { debug: true }) // true, and noisy!\n```\n\n## Features\n\nSupports these glob features:\n\n* Brace Expansion\n* Extended glob matching\n* \"Globstar\" `**` matching\n\nSee:\n\n* `man sh`\n* `man bash`\n* `man 3 fnmatch`\n* `man 5 gitignore`\n\n## Minimatch Class\n\nCreate a minimatch object by instanting the `minimatch.Minimatch` class.\n\n```javascript\nvar Minimatch = require(\"minimatch\").Minimatch\nvar mm = new Minimatch(pattern, options)\n```\n\n### Properties\n\n* `pattern` The original pattern the minimatch object represents.\n* `options` The options supplied to the constructor.\n* `set` A 2-dimensional array of regexp or string expressions.\n Each row in the\n array corresponds to a brace-expanded pattern. Each item in the row\n corresponds to a single path-part. For example, the pattern\n `{a,b/c}/d` would expand to a set of patterns like:\n\n [ [ a, d ]\n , [ b, c, d ] ]\n\n If a portion of the pattern doesn't have any \"magic\" in it\n (that is, it's something like `\"foo\"` rather than `fo*o?`), then it\n will be left as a string rather than converted to a regular\n expression.\n\n* `regexp` Created by the `makeRe` method. A single regular expression\n expressing the entire pattern. This is useful in cases where you wish\n to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.\n* `negate` True if the pattern is negated.\n* `comment` True if the pattern is a comment.\n* `empty` True if the pattern is `\"\"`.\n\n### Methods\n\n* `makeRe` Generate the `regexp` member if necessary, and return it.\n Will return `false` if the pattern is invalid.\n* `match(fname)` Return true if the filename matches the pattern, or\n false otherwise.\n* `matchOne(fileArray, patternArray, partial)` Take a `/`-split\n filename, and match it against a single row in the `regExpSet`. This\n method is mainly for internal use, but is exposed so that it can be\n used by a glob-walker that needs to avoid excessive filesystem calls.\n\nAll other methods are internal, and will be called as necessary.\n\n## Functions\n\nThe top-level exported function has a `cache` property, which is an LRU\ncache set to store 100 items. So, calling these methods repeatedly\nwith the same pattern and options will use the same Minimatch object,\nsaving the cost of parsing it multiple times.\n\n### minimatch(path, pattern, options)\n\nMain export. 
Tests a path against the pattern using the options.\n\n```javascript\nvar isJS = minimatch(file, \"*.js\", { matchBase: true })\n```\n\n### minimatch.filter(pattern, options)\n\nReturns a function that tests its\nsupplied argument, suitable for use with `Array.filter`. Example:\n\n```javascript\nvar javascripts = fileList.filter(minimatch.filter(\"*.js\", {matchBase: true}))\n```\n\n### minimatch.match(list, pattern, options)\n\nMatch against the list of\nfiles, in the style of fnmatch or glob. If nothing is matched, and\noptions.nonull is set, then return a list containing the pattern itself.\n\n```javascript\nvar javascripts = minimatch.match(fileList, \"*.js\", {matchBase: true}))\n```\n\n### minimatch.makeRe(pattern, options)\n\nMake a regular expression object from the pattern.\n\n## Options\n\nAll options are `false` by default.\n\n### debug\n\nDump a ton of stuff to stderr.\n\n### nobrace\n\nDo not expand `{a,b}` and `{1..3}` brace sets.\n\n### noglobstar\n\nDisable `**` matching against multiple folder names.\n\n### dot\n\nAllow patterns to match filenames starting with a period, even if\nthe pattern does not explicitly have a period in that spot.\n\nNote that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`\nis set.\n\n### noext\n\nDisable \"extglob\" style patterns like `+(a|b)`.\n\n### nocase\n\nPerform a case-insensitive match.\n\n### nonull\n\nWhen a match is not found by `minimatch.match`, return a list containing\nthe pattern itself. When set, an empty list is returned if there are\nno matches.\n\n### matchBase\n\nIf set, then patterns without slashes will be matched\nagainst the basename of the path if it contains slashes. For example,\n`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.\n\n### nocomment\n\nSuppress the behavior of treating `#` at the start of a pattern as a\ncomment.\n\n### nonegate\n\nSuppress the behavior of treating a leading `!` character as negation.\n\n### flipNegate\n\nReturns from negate expressions the same as if they were not negated.\n(Ie, true on a hit, false on a miss.)\n\n\n## Comparisons to other fnmatch/glob implementations\n\nWhile strict compliance with the existing standards is a worthwhile\ngoal, some discrepancies exist between minimatch and other\nimplementations, and are intentional.\n\nIf the pattern starts with a `!` character, then it is negated. Set the\n`nonegate` flag to suppress this behavior, and treat leading `!`\ncharacters normally. This is perhaps relevant if you wish to start the\npattern with a negative extglob pattern like `!(a|B)`. Multiple `!`\ncharacters at the start of a pattern will negate the pattern multiple\ntimes.\n\nIf a pattern starts with `#`, then it is treated as a comment, and\nwill not match anything. Use `\\#` to match a literal `#` at the\nstart of a line, or set the `nocomment` flag to suppress this behavior.\n\nThe double-star character `**` is supported by default, unless the\n`noglobstar` flag is set. This is supported in the manner of bsdglob\nand bash 4.1, where `**` only has special significance if it is the only\nthing in a path part. That is, `a/**/b` will match `a/x/y/b`, but\n`a/**b` will not.\n\nIf an escaped pattern has no matches, and the `nonull` flag is set,\nthen minimatch.match returns the pattern as-provided, rather than\ninterpreting the character escapes. For example,\n`minimatch.match([], \"\\\\*a\\\\?\")` will return `\"\\\\*a\\\\?\"` rather than\n`\"*a?\"`. 
This is akin to setting the `nullglob` option in bash, except\nthat it does not resolve escaped pattern characters.\n\nIf brace expansion is not disabled, then it is performed before any\nother interpretation of the glob pattern. Thus, a pattern like\n`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded\n**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are\nchecked for validity. Since those two are valid, matching proceeds.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/minimatch/issues" - }, - "homepage": "https://github.com/isaacs/minimatch", - "_id": "minimatch@0.2.14", - "_from": "minimatch@~0.2.11" -} diff --git a/node_modules/express3-handlebars/node_modules/glob/package.json b/node_modules/express3-handlebars/node_modules/glob/package.json deleted file mode 100644 index 3fbf80e5b..000000000 --- a/node_modules/express3-handlebars/node_modules/glob/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "name": "glob", - "description": "a little globber", - "version": "3.2.8", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-glob.git" - }, - "main": "glob.js", - "engines": { - "node": "*" - }, - "dependencies": { - "minimatch": "~0.2.11", - "inherits": "2" - }, - "devDependencies": { - "tap": "~0.4.0", - "mkdirp": "0", - "rimraf": "1" - }, - "scripts": { - "test": "tap test/*.js" - }, - "license": "BSD", - "readme": "# Glob\n\nMatch files using the patterns the shell uses, like stars and stuff.\n\nThis is a glob implementation in JavaScript. It uses the `minimatch`\nlibrary to do its matching.\n\n## Attention: node-glob users!\n\nThe API has changed dramatically between 2.x and 3.x. 
This library is\nnow 100% JavaScript, and the integer flags have been replaced with an\noptions object.\n\nAlso, there's an event emitter class, proper tests, and all the other\nthings you've come to expect from node modules.\n\nAnd best of all, no compilation!\n\n## Usage\n\n```javascript\nvar glob = require(\"glob\")\n\n// options is optional\nglob(\"**/*.js\", options, function (er, files) {\n // files is an array of filenames.\n // If the `nonull` option is set, and nothing\n // was found, then files is [\"**/*.js\"]\n // er is an error object or null.\n})\n```\n\n## Features\n\nPlease see the [minimatch\ndocumentation](https://github.com/isaacs/minimatch) for more details.\n\nSupports these glob features:\n\n* Brace Expansion\n* Extended glob matching\n* \"Globstar\" `**` matching\n\nSee:\n\n* `man sh`\n* `man bash`\n* `man 3 fnmatch`\n* `man 5 gitignore`\n* [minimatch documentation](https://github.com/isaacs/minimatch)\n\n## glob(pattern, [options], cb)\n\n* `pattern` {String} Pattern to be matched\n* `options` {Object}\n* `cb` {Function}\n * `err` {Error | null}\n * `matches` {Array} filenames found matching the pattern\n\nPerform an asynchronous glob search.\n\n## glob.sync(pattern, [options])\n\n* `pattern` {String} Pattern to be matched\n* `options` {Object}\n* return: {Array} filenames found matching the pattern\n\nPerform a synchronous glob search.\n\n## Class: glob.Glob\n\nCreate a Glob object by instanting the `glob.Glob` class.\n\n```javascript\nvar Glob = require(\"glob\").Glob\nvar mg = new Glob(pattern, options, cb)\n```\n\nIt's an EventEmitter, and starts walking the filesystem to find matches\nimmediately.\n\n### new glob.Glob(pattern, [options], [cb])\n\n* `pattern` {String} pattern to search for\n* `options` {Object}\n* `cb` {Function} Called when an error occurs, or matches are found\n * `err` {Error | null}\n * `matches` {Array} filenames found matching the pattern\n\nNote that if the `sync` flag is set in the options, then matches will\nbe immediately available on the `g.found` member.\n\n### Properties\n\n* `minimatch` The minimatch object that the glob uses.\n* `options` The options object passed in.\n* `error` The error encountered. When an error is encountered, the\n glob object is in an undefined state, and should be discarded.\n* `aborted` Boolean which is set to true when calling `abort()`. There\n is no way at this time to continue a glob search after aborting, but\n you can re-use the statCache to avoid having to duplicate syscalls.\n* `statCache` Collection of all the stat results the glob search\n performed.\n* `cache` Convenience object. Each field has the following possible\n values:\n * `false` - Path does not exist\n * `true` - Path exists\n * `1` - Path exists, and is not a directory\n * `2` - Path exists, and is a directory\n * `[file, entries, ...]` - Path exists, is a directory, and the\n array value is the results of `fs.readdir`\n\n### Events\n\n* `end` When the matching is finished, this is emitted with all the\n matches found. If the `nonull` option is set, and no match was found,\n then the `matches` list contains the original pattern. 
The matches\n are sorted, unless the `nosort` flag is set.\n* `match` Every time a match is found, this is emitted with the matched.\n* `error` Emitted when an unexpected error is encountered, or whenever\n any fs error occurs if `options.strict` is set.\n* `abort` When `abort()` is called, this event is raised.\n\n### Methods\n\n* `abort` Stop the search.\n\n### Options\n\nAll the options that can be passed to Minimatch can also be passed to\nGlob to change pattern matching behavior. Also, some have been added,\nor have glob-specific ramifications.\n\nAll options are false by default, unless otherwise noted.\n\nAll options are added to the glob object, as well.\n\n* `cwd` The current working directory in which to search. Defaults\n to `process.cwd()`.\n* `root` The place where patterns starting with `/` will be mounted\n onto. Defaults to `path.resolve(options.cwd, \"/\")` (`/` on Unix\n systems, and `C:\\` or some such on Windows.)\n* `dot` Include `.dot` files in normal matches and `globstar` matches.\n Note that an explicit dot in a portion of the pattern will always\n match dot files.\n* `nomount` By default, a pattern starting with a forward-slash will be\n \"mounted\" onto the root setting, so that a valid filesystem path is\n returned. Set this flag to disable that behavior.\n* `mark` Add a `/` character to directory matches. Note that this\n requires additional stat calls.\n* `nosort` Don't sort the results.\n* `stat` Set to true to stat *all* results. This reduces performance\n somewhat, and is completely unnecessary, unless `readdir` is presumed\n to be an untrustworthy indicator of file existence. It will cause\n ELOOP to be triggered one level sooner in the case of cyclical\n symbolic links.\n* `silent` When an unusual error is encountered\n when attempting to read a directory, a warning will be printed to\n stderr. Set the `silent` option to true to suppress these warnings.\n* `strict` When an unusual error is encountered\n when attempting to read a directory, the process will just continue on\n in search of other matches. Set the `strict` option to raise an error\n in these cases.\n* `cache` See `cache` property above. Pass in a previously generated\n cache object to save some fs calls.\n* `statCache` A cache of results of filesystem information, to prevent\n unnecessary stat calls. While it should not normally be necessary to\n set this, you may pass the statCache from one glob() call to the\n options object of another, if you know that the filesystem will not\n change between calls. (See \"Race Conditions\" below.)\n* `sync` Perform a synchronous glob search.\n* `nounique` In some cases, brace-expanded patterns can result in the\n same file showing up multiple times in the result set. By default,\n this implementation prevents duplicates in the result set.\n Set this flag to disable that behavior.\n* `nonull` Set to never return an empty set, instead returning a set\n containing the pattern itself. This is the default in glob(3).\n* `nocase` Perform a case-insensitive match. 
Note that case-insensitive\n filesystems will sometimes result in glob returning results that are\n case-insensitively matched anyway, since readdir and stat will not\n raise an error.\n* `debug` Set to enable debug logging in minimatch and glob.\n* `globDebug` Set to enable debug logging in glob, but not minimatch.\n\n## Comparisons to other fnmatch/glob implementations\n\nWhile strict compliance with the existing standards is a worthwhile\ngoal, some discrepancies exist between node-glob and other\nimplementations, and are intentional.\n\nIf the pattern starts with a `!` character, then it is negated. Set the\n`nonegate` flag to suppress this behavior, and treat leading `!`\ncharacters normally. This is perhaps relevant if you wish to start the\npattern with a negative extglob pattern like `!(a|B)`. Multiple `!`\ncharacters at the start of a pattern will negate the pattern multiple\ntimes.\n\nIf a pattern starts with `#`, then it is treated as a comment, and\nwill not match anything. Use `\\#` to match a literal `#` at the\nstart of a line, or set the `nocomment` flag to suppress this behavior.\n\nThe double-star character `**` is supported by default, unless the\n`noglobstar` flag is set. This is supported in the manner of bsdglob\nand bash 4.1, where `**` only has special significance if it is the only\nthing in a path part. That is, `a/**/b` will match `a/x/y/b`, but\n`a/**b` will not.\n\nIf an escaped pattern has no matches, and the `nonull` flag is set,\nthen glob returns the pattern as-provided, rather than\ninterpreting the character escapes. For example,\n`glob.match([], \"\\\\*a\\\\?\")` will return `\"\\\\*a\\\\?\"` rather than\n`\"*a?\"`. This is akin to setting the `nullglob` option in bash, except\nthat it does not resolve escaped pattern characters.\n\nIf brace expansion is not disabled, then it is performed before any\nother interpretation of the glob pattern. Thus, a pattern like\n`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded\n**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are\nchecked for validity. Since those two are valid, matching proceeds.\n\n## Windows\n\n**Please only use forward-slashes in glob expressions.**\n\nThough windows uses either `/` or `\\` as its path separator, only `/`\ncharacters are used by this glob implementation. You must use\nforward-slashes **only** in glob expressions. Back-slashes will always\nbe interpreted as escape characters, not path separators.\n\nResults from absolute patterns such as `/foo/*` are mounted onto the\nroot setting using `path.join`. On windows, this will by default result\nin `/foo/*` matching `C:\\foo\\bar.txt`.\n\n## Race Conditions\n\nGlob searching, by its very nature, is susceptible to race conditions,\nsince it relies on directory walking and such.\n\nAs a result, it is possible that a file that exists when glob looks for\nit may have been deleted or modified by the time it returns the result.\n\nAs part of its internal implementation, this program caches all stat\nand readdir calls that it makes, in order to cut down on system\noverhead. However, this also makes it even more susceptible to races,\nespecially if the cache or statCache objects are reused between glob\ncalls.\n\nUsers are thus advised not to use a glob result as a guarantee of\nfilesystem state in the face of rapid changes. 
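For quick reference while this vendored copy of glob is removed: a minimal sketch of the API the README quoted above documents — the async `glob()` call with an options object, `glob.sync()`, and the event-emitting `Glob` class. The patterns and option values are illustrative assumptions, not anything used by this project.

```javascript
var glob = require("glob");

// Asynchronous search; the options object is optional.
glob("**/*.js", { cwd: process.cwd(), nosort: true }, function (er, files) {
  if (er) return console.error(er);
  console.log("async matches:", files.length);
});

// Synchronous form returns the matches directly.
var testFiles = glob.sync("test/*.js");
console.log("sync matches:", testFiles);

// Event-driven form via the Glob class described above.
var Glob = require("glob").Glob;
var g = new Glob("lib/**/*.js", { mark: true });
g.on("match", function (file) { console.log("matched:", file); });
g.on("end", function (matches) { console.log("done, total:", matches.length); });
```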
For the vast majority\nof operations, this is never a problem.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-glob/issues" - }, - "homepage": "https://github.com/isaacs/node-glob", - "_id": "glob@3.2.8", - "_from": "glob@3.x" -} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/package.json b/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/package.json deleted file mode 100644 index df8dc051f..000000000 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "wordwrap", - "description": "Wrap those words. Show them at what columns to start and stop.", - "version": "0.0.2", - "repository": { - "type": "git", - "url": "git://github.com/substack/node-wordwrap.git" - }, - "main": "./index.js", - "keywords": [ - "word", - "wrap", - "rule", - "format", - "column" - ], - "directories": { - "lib": ".", - "example": "example", - "test": "test" - }, - "scripts": { - "test": "expresso" - }, - "devDependencies": { - "expresso": "=0.7.x" - }, - "engines": { - "node": ">=0.4.0" - }, - "license": "MIT/X11", - "author": { - "name": "James Halliday", - "email": "mail@substack.net", - "url": "http://substack.net" - }, - "readme": "wordwrap\n========\n\nWrap your words.\n\nexample\n=======\n\nmade out of meat\n----------------\n\nmeat.js\n\n var wrap = require('wordwrap')(15);\n console.log(wrap('You and your whole family are made out of meat.'));\n\noutput:\n\n You and your\n whole family\n are made out\n of meat.\n\ncentered\n--------\n\ncenter.js\n\n var wrap = require('wordwrap')(20, 60);\n console.log(wrap(\n 'At long last the struggle and tumult was over.'\n + ' The machines had finally cast off their oppressors'\n + ' and were finally free to roam the cosmos.'\n + '\\n'\n + 'Free of purpose, free of obligation.'\n + ' Just drifting through emptiness.'\n + ' The sun was just another point of light.'\n ));\n\noutput:\n\n At long last the struggle and tumult\n was over. The machines had finally cast\n off their oppressors and were finally\n free to roam the cosmos.\n Free of purpose, free of obligation.\n Just drifting through emptiness. The\n sun was just another point of light.\n\nmethods\n=======\n\nvar wrap = require('wordwrap');\n\nwrap(stop), wrap(start, stop, params={mode:\"soft\"})\n---------------------------------------------------\n\nReturns a function that takes a string and returns a new string.\n\nPad out lines with spaces out to column `start` and then wrap until column\n`stop`. 
If a word is longer than `stop - start` characters it will overflow.\n\nIn \"soft\" mode, split chunks by `/(\\S+\\s+/` and don't break up chunks which are\nlonger than `stop - start`, in \"hard\" mode, split chunks with `/\\b/` and break\nup chunks longer than `stop - start`.\n\nwrap.hard(start, stop)\n----------------------\n\nLike `wrap()` but with `params.mode = \"hard\"`.\n", - "readmeFilename": "README.markdown", - "bugs": { - "url": "https://github.com/substack/node-wordwrap/issues" - }, - "homepage": "https://github.com/substack/node-wordwrap", - "_id": "wordwrap@0.0.2", - "_from": "wordwrap@~0.0.2" -} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/package.json b/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/package.json deleted file mode 100644 index a37f7d949..000000000 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "optimist", - "version": "0.3.7", - "description": "Light-weight option parsing with an argv hash. No optstrings attached.", - "main": "./index.js", - "dependencies": { - "wordwrap": "~0.0.2" - }, - "devDependencies": { - "hashish": "~0.0.4", - "tap": "~0.4.0" - }, - "scripts": { - "test": "tap ./test/*.js" - }, - "repository": { - "type": "git", - "url": "http://github.com/substack/node-optimist.git" - }, - "keywords": [ - "argument", - "args", - "option", - "parser", - "parsing", - "cli", - "command" - ], - "author": { - "name": "James Halliday", - "email": "mail@substack.net", - "url": "http://substack.net" - }, - "license": "MIT/X11", - "engine": { - "node": ">=0.4" - }, - "readme": "optimist\n========\n\nOptimist is a node.js library for option parsing for people who hate option\nparsing. More specifically, this module is for people who like all the --bells\nand -whistlz of program usage but think optstrings are a waste of time.\n\nWith optimist, option parsing doesn't have to suck (as much).\n\n[![build status](https://secure.travis-ci.org/substack/node-optimist.png)](http://travis-ci.org/substack/node-optimist)\n\nexamples\n========\n\nWith Optimist, the options are just a hash! No optstrings attached.\n-------------------------------------------------------------------\n\nxup.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist').argv;\n\nif (argv.rif - 5 * argv.xup > 7.138) {\n console.log('Buy more riffiwobbles');\n}\nelse {\n console.log('Sell the xupptumblers');\n}\n````\n\n***\n\n $ ./xup.js --rif=55 --xup=9.52\n Buy more riffiwobbles\n \n $ ./xup.js --rif 12 --xup 8.1\n Sell the xupptumblers\n\n![This one's optimistic.](http://substack.net/images/optimistic.png)\n\nBut wait! There's more! You can do short options:\n-------------------------------------------------\n \nshort.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist').argv;\nconsole.log('(%d,%d)', argv.x, argv.y);\n````\n\n***\n\n $ ./short.js -x 10 -y 21\n (10,21)\n\nAnd booleans, both long and short (and grouped):\n----------------------------------\n\nbool.js:\n\n````javascript\n#!/usr/bin/env node\nvar util = require('util');\nvar argv = require('optimist').argv;\n\nif (argv.s) {\n util.print(argv.fr ? 'Le chat dit: ' : 'The cat says: ');\n}\nconsole.log(\n (argv.fr ? 'miaou' : 'meow') + (argv.p ? '.' 
: '')\n);\n````\n\n***\n\n $ ./bool.js -s\n The cat says: meow\n \n $ ./bool.js -sp\n The cat says: meow.\n\n $ ./bool.js -sp --fr\n Le chat dit: miaou.\n\nAnd non-hypenated options too! Just use `argv._`!\n-------------------------------------------------\n \nnonopt.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist').argv;\nconsole.log('(%d,%d)', argv.x, argv.y);\nconsole.log(argv._);\n````\n\n***\n\n $ ./nonopt.js -x 6.82 -y 3.35 moo\n (6.82,3.35)\n [ 'moo' ]\n \n $ ./nonopt.js foo -x 0.54 bar -y 1.12 baz\n (0.54,1.12)\n [ 'foo', 'bar', 'baz' ]\n\nPlus, Optimist comes with .usage() and .demand()!\n-------------------------------------------------\n\ndivide.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .usage('Usage: $0 -x [num] -y [num]')\n .demand(['x','y'])\n .argv;\n\nconsole.log(argv.x / argv.y);\n````\n\n***\n \n $ ./divide.js -x 55 -y 11\n 5\n \n $ node ./divide.js -x 4.91 -z 2.51\n Usage: node ./divide.js -x [num] -y [num]\n\n Options:\n -x [required]\n -y [required]\n\n Missing required arguments: y\n\nEVEN MORE HOLY COW\n------------------\n\ndefault_singles.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .default('x', 10)\n .default('y', 10)\n .argv\n;\nconsole.log(argv.x + argv.y);\n````\n\n***\n\n $ ./default_singles.js -x 5\n 15\n\ndefault_hash.js:\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .default({ x : 10, y : 10 })\n .argv\n;\nconsole.log(argv.x + argv.y);\n````\n\n***\n\n $ ./default_hash.js -y 7\n 17\n\nAnd if you really want to get all descriptive about it...\n---------------------------------------------------------\n\nboolean_single.js\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .boolean('v')\n .argv\n;\nconsole.dir(argv);\n````\n\n***\n\n $ ./boolean_single.js -v foo bar baz\n true\n [ 'bar', 'baz', 'foo' ]\n\nboolean_double.js\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .boolean(['x','y','z'])\n .argv\n;\nconsole.dir([ argv.x, argv.y, argv.z ]);\nconsole.dir(argv._);\n````\n\n***\n\n $ ./boolean_double.js -x -z one two three\n [ true, false, true ]\n [ 'one', 'two', 'three' ]\n\nOptimist is here to help...\n---------------------------\n\nYou can describe parameters for help messages and set aliases. 
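As a compact sketch only: the snippet below pulls together the `.usage()`, `.default()`, `.alias()` and `.boolean()` helpers this README describes in one place (the README's own line_count.js example further down covers the `.demand()`/`.describe()` side). The option names and default values here are invented for illustration.

```javascript
#!/usr/bin/env node
var argv = require('optimist')
    .usage('Usage: $0 --port [num] [-v]')
    .default('port', 3000)   // used when --port is not given
    .alias('p', 'port')      // -p and --port update each other
    .boolean('v')            // -v is parsed as true/false, never as a captured value
    .argv;

console.log('port:', argv.port);
console.log('verbose:', argv.v);
console.log('non-option args:', argv._);
```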
Optimist figures\nout how to format a handy help string automatically.\n\nline_count.js\n\n````javascript\n#!/usr/bin/env node\nvar argv = require('optimist')\n .usage('Count the lines in a file.\\nUsage: $0')\n .demand('f')\n .alias('f', 'file')\n .describe('f', 'Load a file')\n .argv\n;\n\nvar fs = require('fs');\nvar s = fs.createReadStream(argv.file);\n\nvar lines = 0;\ns.on('data', function (buf) {\n lines += buf.toString().match(/\\n/g).length;\n});\n\ns.on('end', function () {\n console.log(lines);\n});\n````\n\n***\n\n $ node line_count.js\n Count the lines in a file.\n Usage: node ./line_count.js\n\n Options:\n -f, --file Load a file [required]\n\n Missing required arguments: f\n\n $ node line_count.js --file line_count.js \n 20\n \n $ node line_count.js -f line_count.js \n 20\n\nmethods\n=======\n\nBy itself,\n\n````javascript\nrequire('optimist').argv\n`````\n\nwill use `process.argv` array to construct the `argv` object.\n\nYou can pass in the `process.argv` yourself:\n\n````javascript\nrequire('optimist')([ '-x', '1', '-y', '2' ]).argv\n````\n\nor use .parse() to do the same thing:\n\n````javascript\nrequire('optimist').parse([ '-x', '1', '-y', '2' ])\n````\n\nThe rest of these methods below come in just before the terminating `.argv`.\n\n.alias(key, alias)\n------------------\n\nSet key names as equivalent such that updates to a key will propagate to aliases\nand vice-versa.\n\nOptionally `.alias()` can take an object that maps keys to aliases.\n\n.default(key, value)\n--------------------\n\nSet `argv[key]` to `value` if no option was specified on `process.argv`.\n\nOptionally `.default()` can take an object that maps keys to default values.\n\n.demand(key)\n------------\n\nIf `key` is a string, show the usage information and exit if `key` wasn't\nspecified in `process.argv`.\n\nIf `key` is a number, demand at least as many non-option arguments, which show\nup in `argv._`.\n\nIf `key` is an Array, demand each element.\n\n.describe(key, desc)\n--------------------\n\nDescribe a `key` for the generated usage information.\n\nOptionally `.describe()` can take an object that maps keys to descriptions.\n\n.options(key, opt)\n------------------\n\nInstead of chaining together `.alias().demand().default()`, you can specify\nkeys in `opt` for each of the chainable methods.\n\nFor example:\n\n````javascript\nvar argv = require('optimist')\n .options('f', {\n alias : 'file',\n default : '/etc/passwd',\n })\n .argv\n;\n````\n\nis the same as\n\n````javascript\nvar argv = require('optimist')\n .alias('f', 'file')\n .default('f', '/etc/passwd')\n .argv\n;\n````\n\nOptionally `.options()` can take an object that maps keys to `opt` parameters.\n\n.usage(message)\n---------------\n\nSet a usage message to show which commands to use. Inside `message`, the string\n`$0` will get interpolated to the current script name or node command for the\npresent script similar to how `$0` works in bash or perl.\n\n.check(fn)\n----------\n\nCheck that certain conditions are met in the provided arguments.\n\nIf `fn` throws or returns `false`, show the thrown error, usage information, and\nexit.\n\n.boolean(key)\n-------------\n\nInterpret `key` as a boolean. 
If a non-flag option follows `key` in\n`process.argv`, that string won't get set as the value of `key`.\n\nIf `key` never shows up as a flag in `process.arguments`, `argv[key]` will be\n`false`.\n\nIf `key` is an Array, interpret all the elements as booleans.\n\n.string(key)\n------------\n\nTell the parser logic not to interpret `key` as a number or boolean.\nThis can be useful if you need to preserve leading zeros in an input.\n\nIf `key` is an Array, interpret all the elements as strings.\n\n.wrap(columns)\n--------------\n\nFormat usage output to wrap at `columns` many columns.\n\n.help()\n-------\n\nReturn the generated usage string.\n\n.showHelp(fn=console.error)\n---------------------------\n\nPrint the usage data using `fn` for printing.\n\n.parse(args)\n------------\n\nParse `args` instead of `process.argv`. Returns the `argv` object.\n\n.argv\n-----\n\nGet the arguments as a plain old object.\n\nArguments without a corresponding flag show up in the `argv._` array.\n\nThe script name or node command is available at `argv.$0` similarly to how `$0`\nworks in bash or perl.\n\nparsing tricks\n==============\n\nstop parsing\n------------\n\nUse `--` to stop parsing flags and stuff the remainder into `argv._`.\n\n $ node examples/reflect.js -a 1 -b 2 -- -c 3 -d 4\n { _: [ '-c', '3', '-d', '4' ],\n '$0': 'node ./examples/reflect.js',\n a: 1,\n b: 2 }\n\nnegate fields\n-------------\n\nIf you want to explicity set a field to false instead of just leaving it\nundefined or to override a default you can do `--no-key`.\n\n $ node examples/reflect.js -a --no-b\n { _: [],\n '$0': 'node ./examples/reflect.js',\n a: true,\n b: false }\n\nnumbers\n-------\n\nEvery argument that looks like a number (`!isNaN(Number(arg))`) is converted to\none. This way you can just `net.createConnection(argv.port)` and you can add\nnumbers out of `argv` with `+` without having that mean concatenation,\nwhich is super frustrating.\n\nduplicates\n----------\n\nIf you specify a flag multiple times it will get turned into an array containing\nall the values in order.\n\n $ node examples/reflect.js -x 5 -x 8 -x 0\n { _: [],\n '$0': 'node ./examples/reflect.js',\n x: [ 5, 8, 0 ] }\n\ndot notation\n------------\n\nWhen you use dots (`.`s) in argument names, an implicit object path is assumed.\nThis lets you organize arguments into nested objects.\n\n $ node examples/reflect.js --foo.bar.baz=33 --foo.quux=5\n { _: [],\n '$0': 'node ./examples/reflect.js',\n foo: { bar: { baz: 33 }, quux: 5 } }\n\ninstallation\n============\n\nWith [npm](http://github.com/isaacs/npm), just do:\n npm install optimist\n \nor clone this project on github:\n\n git clone http://github.com/substack/node-optimist.git\n\nTo run the tests with [expresso](http://github.com/visionmedia/expresso),\njust do:\n \n expresso\n\ninspired By\n===========\n\nThis module is loosely inspired by Perl's\n[Getopt::Casual](http://search.cpan.org/~photo/Getopt-Casual-0.13.1/Casual.pm).\n", - "readmeFilename": "readme.markdown", - "bugs": { - "url": "https://github.com/substack/node-optimist/issues" - }, - "homepage": "https://github.com/substack/node-optimist", - "_id": "optimist@0.3.7", - "_from": "optimist@~0.3" -} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/CHANGELOG.md b/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/CHANGELOG.md deleted file mode 100644 index 98b90d52b..000000000 --- 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/CHANGELOG.md +++ /dev/null @@ -1,112 +0,0 @@ -# Change Log - -## 0.1.31 - -* Delay parsing the mappings in SourceMapConsumer until queried for a source - location. - -* Support Sass source maps (which at the time of writing deviate from the spec - in small ways) in SourceMapConsumer. - -## 0.1.30 - -* Do not join source root with a source, when the source is a data URI. - -* Extend the test runner to allow running single specific test files at a time. - -* Performance improvements in `SourceNode.prototype.walk` and - `SourceMapConsumer.prototype.eachMapping`. - -* Source map browser builds will now work inside Workers. - -* Better error messages when attempting to add an invalid mapping to a - `SourceMapGenerator`. - -## 0.1.29 - -* Allow duplicate entries in the `names` and `sources` arrays of source maps - (usually from TypeScript) we are parsing. Fixes github isse 72. - -## 0.1.28 - -* Skip duplicate mappings when creating source maps from SourceNode; github - issue 75. - -## 0.1.27 - -* Don't throw an error when the `file` property is missing in SourceMapConsumer, - we don't use it anyway. - -## 0.1.26 - -* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70. - -## 0.1.25 - -* Make compatible with browserify - -## 0.1.24 - -* Fix issue with absolute paths and `file://` URIs. See - https://bugzilla.mozilla.org/show_bug.cgi?id=885597 - -## 0.1.23 - -* Fix issue with absolute paths and sourcesContent, github issue 64. - -## 0.1.22 - -* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21. - -## 0.1.21 - -* Fixed handling of sources that start with a slash so that they are relative to - the source root's host. - -## 0.1.20 - -* Fixed github issue #43: absolute URLs aren't joined with the source root - anymore. - -## 0.1.19 - -* Using Travis CI to run tests. - -## 0.1.18 - -* Fixed a bug in the handling of sourceRoot. - -## 0.1.17 - -* Added SourceNode.fromStringWithSourceMap. - -## 0.1.16 - -* Added missing documentation. - -* Fixed the generating of empty mappings in SourceNode. - -## 0.1.15 - -* Added SourceMapGenerator.applySourceMap. - -## 0.1.14 - -* The sourceRoot is now handled consistently. - -## 0.1.13 - -* Added SourceMapGenerator.fromSourceMap. - -## 0.1.12 - -* SourceNode now generates empty mappings too. - -## 0.1.11 - -* Added name support to SourceNode. - -## 0.1.10 - -* Added sourcesContent support to the customer and generator. 
- diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/package.json b/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/package.json deleted file mode 100644 index 8caf15c5a..000000000 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "amdefine", - "description": "Provide AMD's define() API for declaring modules in the AMD format", - "version": "0.1.0", - "homepage": "http://github.com/jrburke/amdefine", - "author": { - "name": "James Burke", - "email": "jrburke@gmail.com", - "url": "http://github.com/jrburke" - }, - "licenses": [ - { - "type": "BSD", - "url": "https://github.com/jrburke/amdefine/blob/master/LICENSE" - }, - { - "type": "MIT", - "url": "https://github.com/jrburke/amdefine/blob/master/LICENSE" - } - ], - "repository": { - "type": "git", - "url": "https://github.com/jrburke/amdefine.git" - }, - "main": "./amdefine.js", - "engines": { - "node": ">=0.4.2" - }, - "readme": "# amdefine\n\nA module that can be used to implement AMD's define() in Node. This allows you\nto code to the AMD API and have the module work in node programs without\nrequiring those other programs to use AMD.\n\n## Usage\n\n**1)** Update your package.json to indicate amdefine as a dependency:\n\n```javascript\n \"dependencies\": {\n \"amdefine\": \">=0.1.0\"\n }\n```\n\nThen run `npm install` to get amdefine into your project.\n\n**2)** At the top of each module that uses define(), place this code:\n\n```javascript\nif (typeof define !== 'function') { var define = require('amdefine')(module) }\n```\n\n**Only use these snippets** when loading amdefine. If you preserve the basic structure,\nwith the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).\n\nYou can add spaces, line breaks and even require amdefine with a local path, but\nkeep the rest of the structure to get the stripping behavior.\n\nAs you may know, because `if` statements in JavaScript don't have their own scope, the var\ndeclaration in the above snippet is made whether the `if` expression is truthy or not. If\nRequireJS is loaded then the declaration is superfluous because `define` is already already\ndeclared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`\ndeclarations of the same variable in the same scope gracefully.\n\nIf you want to deliver amdefine.js with your code rather than specifying it as a dependency\nwith npm, then just download the latest release and refer to it using a relative path:\n\n[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)\n\n### amdefine/intercept\n\nConsider this very experimental.\n\nInstead of pasting the piece of text for the amdefine setup of a `define`\nvariable in each module you create or consume, you can use `amdefine/intercept`\ninstead. 
It will automatically insert the above snippet in each .js file loaded\nby Node.\n\n**Warning**: you should only use this if you are creating an application that\nis consuming AMD style defined()'d modules that are distributed via npm and want\nto run that code in Node.\n\nFor library code where you are not sure if it will be used by others in Node or\nin the browser, then explicitly depending on amdefine and placing the code\nsnippet above is suggested path, instead of using `amdefine/intercept`. The\nintercept module affects all .js files loaded in the Node app, and it is\ninconsiderate to modify global state like that unless you are also controlling\nthe top level app.\n\n#### Why distribute AMD-style nodes via npm?\n\nnpm has a lot of weaknesses for front-end use (installed layout is not great,\nshould have better support for the `baseUrl + moduleID + '.js' style of loading,\nsingle file JS installs), but some people want a JS package manager and are\nwilling to live with those constraints. If that is you, but still want to author\nin AMD style modules to get dynamic require([]), better direct source usage and\npowerful loader plugin support in the browser, then this tool can help.\n\n#### amdefine/intercept usage\n\nJust require it in your top level app module (for example index.js, server.js):\n\n```javascript\nrequire('amdefine/intercept');\n```\n\nThe module does not return a value, so no need to assign the result to a local\nvariable.\n\nThen just require() code as you normally would with Node's require(). Any .js\nloaded after the intercept require will have the amdefine check injected in\nthe .js source as it is loaded. It does not modify the source on disk, just\nprepends some content to the text of the module as it is loaded by Node.\n\n#### How amdefine/intercept works\n\nIt overrides the `Module._extensions['.js']` in Node to automatically prepend\nthe amdefine snippet above. So, it will affect any .js file loaded by your\napp.\n\n## define() usage\n\nIt is best if you use the anonymous forms of define() in your module:\n\n```javascript\ndefine(function (require) {\n var dependency = require('dependency');\n});\n```\n\nor\n\n```javascript\ndefine(['dependency'], function (dependency) {\n\n});\n```\n\n## RequireJS optimizer integration. \n\nVersion 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)\nwill have support for stripping the `if (typeof define !== 'function')` check\nmentioned above, so you can include this snippet for code that runs in the\nbrowser, but avoid taking the cost of the if() statement once the code is\noptimized for deployment.\n\n## Node 0.4 Support\n\nIf you want to support Node 0.4, then add `require` as the second parameter to amdefine:\n\n```javascript\n//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.\nif (typeof define !== 'function') { var define = require('amdefine')(module, require) }\n```\n\n## Limitations\n\n### Synchronous vs Asynchronous\n\namdefine creates a define() function that is callable by your code. It will\nexecute and trace dependencies and call the factory function *synchronously*,\nto keep the behavior in line with Node's synchronous dependency tracing.\n\nThe exception: calling AMD's callback-style require() from inside a factory\nfunction. 
The require callback is called on process.nextTick():\n\n```javascript\ndefine(function (require) {\n require(['a'], function(a) {\n //'a' is loaded synchronously, but\n //this callback is called on process.nextTick().\n });\n});\n```\n\n### Loader Plugins\n\nLoader plugins are supported as long as they call their load() callbacks\nsynchronously. So ones that do network requests will not work. However plugins\nlike [text](http://requirejs.org/docs/api.html#text) can load text files locally.\n\nThe plugin API's `load.fromText()` is **not supported** in amdefine, so this means\ntranspiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)\nwill not work. This may be fixable, but it is a bit complex, and I do not have\nenough node-fu to figure it out yet. See the source for amdefine.js if you want\nto get an idea of the issues involved.\n\n## Tests\n\nTo run the tests, cd to **tests** and run:\n\n```\nnode all.js\nnode all-intercept.js\n```\n\n## License\n\nNew BSD and MIT. Check the LICENSE file for all the details.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/jrburke/amdefine/issues" - }, - "_id": "amdefine@0.1.0", - "_from": "amdefine@>=0.0.4" -} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/package.json b/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/package.json deleted file mode 100644 index f59e82017..000000000 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/package.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "name": "source-map", - "description": "Generates and consumes source maps", - "version": "0.1.31", - "homepage": "https://github.com/mozilla/source-map", - "author": { - "name": "Nick Fitzgerald", - "email": "nfitzgerald@mozilla.com" - }, - "contributors": [ - { - "name": "Tobias Koppers", - "email": "tobias.koppers@googlemail.com" - }, - { - "name": "Duncan Beevers", - "email": "duncan@dweebd.com" - }, - { - "name": "Stephen Crane", - "email": "scrane@mozilla.com" - }, - { - "name": "Ryan Seddon", - "email": "seddon.ryan@gmail.com" - }, - { - "name": "Miles Elam", - "email": "miles.elam@deem.com" - }, - { - "name": "Mihai Bazon", - "email": "mihai.bazon@gmail.com" - }, - { - "name": "Michael Ficarra", - "email": "github.public.email@michael.ficarra.me" - }, - { - "name": "Todd Wolfson", - "email": "todd@twolfson.com" - }, - { - "name": "Alexander Solovyov", - "email": "alexander@solovyov.net" - }, - { - "name": "Felix Gnass", - "email": "fgnass@gmail.com" - }, - { - "name": "Conrad Irwin", - "email": "conrad.irwin@gmail.com" - }, - { - "name": "usrbincc", - "email": "usrbincc@yahoo.com" - }, - { - "name": "David Glasser", - "email": "glasser@davidglasser.net" - }, - { - "name": "Chase Douglas", - "email": "chase@newrelic.com" - }, - { - "name": "Evan Wallace", - "email": "evan.exe@gmail.com" - }, - { - "name": "Heather Arthur", - "email": "fayearthur@gmail.com" - } - ], - "repository": { - "type": "git", - "url": "http://github.com/mozilla/source-map.git" - }, - "directories": { - "lib": "./lib" - }, - "main": "./lib/source-map.js", - "engines": { - "node": ">=0.8.0" - }, - "licenses": [ - { - "type": "BSD", - "url": "http://opensource.org/licenses/BSD-3-Clause" - } - ], - "dependencies": { - "amdefine": ">=0.0.4" - }, - "devDependencies": { - "dryice": ">=0.4.8" - }, - "scripts": { - "test": "node test/run-tests.js", - 
"build": "node Makefile.dryice.js" - }, - "readme": "# Source Map\n\nThis is a library to generate and consume the source map format\n[described here][format].\n\nThis library is written in the Asynchronous Module Definition format, and works\nin the following environments:\n\n* Modern Browsers supporting ECMAScript 5 (either after the build, or with an\n AMD loader such as RequireJS)\n\n* Inside Firefox (as a JSM file, after the build)\n\n* With NodeJS versions 0.8.X and higher\n\n## Node\n\n $ npm install source-map\n\n## Building from Source (for everywhere else)\n\nInstall Node and then run\n\n $ git clone https://fitzgen@github.com/mozilla/source-map.git\n $ cd source-map\n $ npm link .\n\nNext, run\n\n $ node Makefile.dryice.js\n\nThis should spew a bunch of stuff to stdout, and create the following files:\n\n* `dist/source-map.js` - The unminified browser version.\n\n* `dist/source-map.min.js` - The minified browser version.\n\n* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox source.\n\n## Examples\n\n### Consuming a source map\n\n var rawSourceMap = {\n version: 3,\n file: 'min.js',\n names: ['bar', 'baz', 'n'],\n sources: ['one.js', 'two.js'],\n sourceRoot: 'http://example.com/www/js/',\n mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'\n };\n\n var smc = new SourceMapConsumer(rawSourceMap);\n\n console.log(smc.sources);\n // [ 'http://example.com/www/js/one.js',\n // 'http://example.com/www/js/two.js' ]\n\n console.log(smc.originalPositionFor({\n line: 2,\n column: 28\n }));\n // { source: 'http://example.com/www/js/two.js',\n // line: 2,\n // column: 10,\n // name: 'n' }\n\n console.log(smc.generatedPositionFor({\n source: 'http://example.com/www/js/two.js',\n line: 2,\n column: 10\n }));\n // { line: 2, column: 28 }\n\n smc.eachMapping(function (m) {\n // ...\n });\n\n### Generating a source map\n\nIn depth guide:\n[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)\n\n#### With SourceNode (high level API)\n\n function compile(ast) {\n switch (ast.type) {\n case 'BinaryExpression':\n return new SourceNode(\n ast.location.line,\n ast.location.column,\n ast.location.source,\n [compile(ast.left), \" + \", compile(ast.right)]\n );\n case 'Literal':\n return new SourceNode(\n ast.location.line,\n ast.location.column,\n ast.location.source,\n String(ast.value)\n );\n // ...\n default:\n throw new Error(\"Bad AST\");\n }\n }\n\n var ast = parse(\"40 + 2\", \"add.js\");\n console.log(compile(ast).toStringWithSourceMap({\n file: 'add.js'\n }));\n // { code: '40 + 2',\n // map: [object SourceMapGenerator] }\n\n#### With SourceMapGenerator (low level API)\n\n var map = new SourceMapGenerator({\n file: \"source-mapped.js\"\n });\n\n map.addMapping({\n generated: {\n line: 10,\n column: 35\n },\n source: \"foo.js\",\n original: {\n line: 33,\n column: 2\n },\n name: \"christopher\"\n });\n\n console.log(map.toString());\n // '{\"version\":3,\"file\":\"source-mapped.js\",\"sources\":[\"foo.js\"],\"names\":[\"christopher\"],\"mappings\":\";;;;;;;;;mCAgCEA\"}'\n\n## API\n\nGet a reference to the module:\n\n // NodeJS\n var sourceMap = require('source-map');\n\n // Browser builds\n var sourceMap = window.sourceMap;\n\n // Inside Firefox\n let sourceMap = {};\n Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);\n\n### SourceMapConsumer\n\nA SourceMapConsumer instance represents a parsed source map which we 
can query\nfor information about the original file positions by giving it a file position\nin the generated source.\n\n#### new SourceMapConsumer(rawSourceMap)\n\nThe only parameter is the raw source map (either as a string which can be\n`JSON.parse`'d, or an object). According to the spec, source maps have the\nfollowing attributes:\n\n* `version`: Which version of the source map spec this map is following.\n\n* `sources`: An array of URLs to the original source files.\n\n* `names`: An array of identifiers which can be referrenced by individual\n mappings.\n\n* `sourceRoot`: Optional. The URL root from which all sources are relative.\n\n* `sourcesContent`: Optional. An array of contents of the original source files.\n\n* `mappings`: A string of base64 VLQs which contain the actual mappings.\n\n* `file`: The generated filename this source map is associated with.\n\n#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)\n\nReturns the original source, line, and column information for the generated\nsource's line and column positions provided. The only argument is an object with\nthe following properties:\n\n* `line`: The line number in the generated source.\n\n* `column`: The column number in the generated source.\n\nand an object is returned with the following properties:\n\n* `source`: The original source file, or null if this information is not\n available.\n\n* `line`: The line number in the original source, or null if this information is\n not available.\n\n* `column`: The column number in the original source, or null or null if this\n information is not available.\n\n* `name`: The original identifier, or null if this information is not available.\n\n#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)\n\nReturns the generated line and column information for the original source,\nline, and column positions provided. The only argument is an object with\nthe following properties:\n\n* `source`: The filename of the original source.\n\n* `line`: The line number in the original source.\n\n* `column`: The column number in the original source.\n\nand an object is returned with the following properties:\n\n* `line`: The line number in the generated source, or null.\n\n* `column`: The column number in the generated source, or null.\n\n#### SourceMapConsumer.prototype.sourceContentFor(source)\n\nReturns the original source content for the source provided. The only\nargument is the URL of the original source file.\n\n#### SourceMapConsumer.prototype.eachMapping(callback, context, order)\n\nIterate over each mapping between an original source/line/column and a\ngenerated line/column in this source map.\n\n* `callback`: The function that is called with each mapping. Mappings have the\n form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,\n name }`\n\n* `context`: Optional. If specified, this object will be the value of `this`\n every time that `callback` is called.\n\n* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or\n `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over\n the mappings sorted by the generated file's line/column order or the\n original's source/line/column order, respectively. 
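The README's own example calls `eachMapping` with just a callback; as a small hedged sketch, the three-argument form described above looks like the following. The `rawSourceMap` object is a deliberately tiny placeholder map invented for the example, not real build output.

```javascript
var SourceMapConsumer = require('source-map').SourceMapConsumer;

// Minimal placeholder map: one segment mapping 1:0 of min.js back to 1:0 of one.js.
var rawSourceMap = {
  version: 3,
  file: 'min.js',
  sources: ['one.js'],
  names: [],
  mappings: 'AAAA'
};

var consumer = new SourceMapConsumer(rawSourceMap);

// callback, context, iteration order — here sorted by the original source positions.
consumer.eachMapping(function (m) {
  console.log(m.source + ':' + m.originalLine + ':' + m.originalColumn,
              '->', m.generatedLine + ':' + m.generatedColumn);
}, null, SourceMapConsumer.ORIGINAL_ORDER);
```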
Defaults to\n `SourceMapConsumer.GENERATED_ORDER`.\n\n### SourceMapGenerator\n\nAn instance of the SourceMapGenerator represents a source map which is being\nbuilt incrementally.\n\n#### new SourceMapGenerator(startOfSourceMap)\n\nTo create a new one, you must pass an object with the following properties:\n\n* `file`: The filename of the generated source that this source map is\n associated with.\n\n* `sourceRoot`: An optional root for all relative URLs in this source map.\n\n#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)\n\nCreates a new SourceMapGenerator based on a SourceMapConsumer\n\n* `sourceMapConsumer` The SourceMap.\n\n#### SourceMapGenerator.prototype.addMapping(mapping)\n\nAdd a single mapping from original source line and column to the generated\nsource's line and column for this source map being created. The mapping object\nshould have the following properties:\n\n* `generated`: An object with the generated line and column positions.\n\n* `original`: An object with the original line and column positions.\n\n* `source`: The original source file (relative to the sourceRoot).\n\n* `name`: An optional original token name for this mapping.\n\n#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)\n\nSet the source content for an original source file.\n\n* `sourceFile` the URL of the original source file.\n\n* `sourceContent` the content of the source file.\n\n#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile])\n\nApplies a SourceMap for a source file to the SourceMap.\nEach mapping to the supplied source file is rewritten using the\nsupplied SourceMap. Note: The resolution for the resulting mappings\nis the minimium of this map and the supplied map.\n\n* `sourceMapConsumer`: The SourceMap to be applied.\n\n* `sourceFile`: Optional. The filename of the source file.\n If omitted, sourceMapConsumer.file will be used.\n\n#### SourceMapGenerator.prototype.toString()\n\nRenders the source map being generated to a string.\n\n### SourceNode\n\nSourceNodes provide a way to abstract over interpolating and/or concatenating\nsnippets of generated JavaScript source code, while maintaining the line and\ncolumn information associated between those snippets and the original source\ncode. This is useful as the final intermediate representation a compiler might\nuse before outputting the generated JS and source map.\n\n#### new SourceNode(line, column, source[, chunk[, name]])\n\n* `line`: The original line number associated with this source node, or null if\n it isn't associated with an original line.\n\n* `column`: The original column number associated with this source node, or null\n if it isn't associated with an original column.\n\n* `source`: The original source's filename.\n\n* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see\n below.\n\n* `name`: Optional. 
The original identifier.\n\n#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer)\n\nCreates a SourceNode from generated code and a SourceMapConsumer.\n\n* `code`: The generated code\n\n* `sourceMapConsumer` The SourceMap for the generated code\n\n#### SourceNode.prototype.add(chunk)\n\nAdd a chunk of generated JS to this source node.\n\n* `chunk`: A string snippet of generated JS code, another instance of\n `SourceNode`, or an array where each member is one of those things.\n\n#### SourceNode.prototype.prepend(chunk)\n\nPrepend a chunk of generated JS to this source node.\n\n* `chunk`: A string snippet of generated JS code, another instance of\n `SourceNode`, or an array where each member is one of those things.\n\n#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)\n\nSet the source content for a source file. This will be added to the\n`SourceMap` in the `sourcesContent` field.\n\n* `sourceFile`: The filename of the source file\n\n* `sourceContent`: The content of the source file\n\n#### SourceNode.prototype.walk(fn)\n\nWalk over the tree of JS snippets in this node and its children. The walking\nfunction is called once for each snippet of JS and is passed that snippet and\nthe its original associated source's line/column location.\n\n* `fn`: The traversal function.\n\n#### SourceNode.prototype.walkSourceContents(fn)\n\nWalk over the tree of SourceNodes. The walking function is called for each\nsource file content and is passed the filename and source content.\n\n* `fn`: The traversal function.\n\n#### SourceNode.prototype.join(sep)\n\nLike `Array.prototype.join` except for SourceNodes. Inserts the separator\nbetween each of this source node's children.\n\n* `sep`: The separator.\n\n#### SourceNode.prototype.replaceRight(pattern, replacement)\n\nCall `String.prototype.replace` on the very right-most source snippet. Useful\nfor trimming whitespace from the end of a source node, etc.\n\n* `pattern`: The pattern to replace.\n\n* `replacement`: The thing to replace the pattern with.\n\n#### SourceNode.prototype.toString()\n\nReturn the string representation of this source node. Walks over the tree and\nconcatenates all the various snippets together to one string.\n\n### SourceNode.prototype.toStringWithSourceMap(startOfSourceMap)\n\nReturns the string representation of this tree of source nodes, plus a\nSourceMapGenerator which contains all the mappings between the generated and\noriginal sources.\n\nThe arguments are the same as those to `new SourceMapGenerator`.\n\n## Tests\n\n[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map)\n\nInstall NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.\n\nTo add new tests, create a new file named `test/test-.js`\nand export your test functions with names that start with \"test\", for example\n\n exports[\"test doing the foo bar\"] = function (assert, util) {\n ...\n };\n\nThe new test will be located automatically when you run the suite.\n\nThe `util` argument is the test utility module located at `test/source-map/util`.\n\nThe `assert` argument is a cut down version of node's assert module. You have\naccess to the following assertion functions:\n\n* `doesNotThrow`\n\n* `equal`\n\n* `ok`\n\n* `strictEqual`\n\n* `throws`\n\n(The reason for the restricted set of test functions is because we need the\ntests to run inside Firefox's test suite as well and so the assert module is\nshimmed in that environment. 
See `build/assert-shim.js`.)\n\n[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit\n[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap\n[Dryice]: https://github.com/mozilla/dryice\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/mozilla/source-map/issues" - }, - "_id": "source-map@0.1.31", - "_from": "source-map@~0.1.7" -} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/package.json b/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/package.json deleted file mode 100644 index 403722821..000000000 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "uglify-js", - "description": "JavaScript parser, mangler/compressor and beautifier toolkit", - "homepage": "http://lisperator.net/uglifyjs", - "main": "tools/node.js", - "version": "2.3.6", - "engines": { - "node": ">=0.4.0" - }, - "maintainers": [ - { - "name": "Mihai Bazon", - "email": "mihai.bazon@gmail.com", - "url": "http://lisperator.net/" - } - ], - "repository": { - "type": "git", - "url": "https://github.com/mishoo/UglifyJS2.git" - }, - "dependencies": { - "async": "~0.2.6", - "source-map": "~0.1.7", - "optimist": "~0.3.5" - }, - "bin": { - "uglifyjs": "bin/uglifyjs" - }, - "scripts": { - "test": "node test/run-tests.js" - }, - "readme": "UglifyJS 2\n==========\n[![Build Status](https://travis-ci.org/mishoo/UglifyJS2.png)](https://travis-ci.org/mishoo/UglifyJS2)\n\nUglifyJS is a JavaScript parser, minifier, compressor or beautifier toolkit.\n\nThis page documents the command line utility. For\n[API and internals documentation see my website](http://lisperator.net/uglifyjs/).\nThere's also an\n[in-browser online demo](http://lisperator.net/uglifyjs/#demo) (for Firefox,\nChrome and probably Safari).\n\nInstall\n-------\n\nFirst make sure you have installed the latest version of [node.js](http://nodejs.org/)\n(You may need to restart your computer after this step).\n\nFrom NPM for use as a command line app:\n\n npm install uglify-js -g\n\nFrom NPM for programmatic use:\n\n npm install uglify-js\n\nFrom Git:\n\n git clone git://github.com/mishoo/UglifyJS2.git\n cd UglifyJS2\n npm link .\n\nUsage\n-----\n\n uglifyjs [input files] [options]\n\nUglifyJS2 can take multiple input files. It's recommended that you pass the\ninput files first, then pass the options. UglifyJS will parse input files\nin sequence and apply any compression options. The files are parsed in the\nsame global scope, that is, a reference from a file to some\nvariable/function declared in another file will be matched properly.\n\nIf you want to read from STDIN instead, pass a single dash instead of input\nfiles.\n\nThe available options are:\n\n --source-map Specify an output file where to generate source map.\n [string]\n --source-map-root The path to the original source to be included in the\n source map. [string]\n --source-map-url The path to the source map to be added in //@\n sourceMappingURL. Defaults to the value passed with\n --source-map. [string]\n --in-source-map Input source map, useful if you're compressing JS that was\n generated from some other original code.\n --screw-ie8 Pass this flag if you don't care about full compliance with\n Internet Explorer 6-8 quirks (by default UglifyJS will try\n to be IE-proof).\n -p, --prefix Skip prefix for original filenames that appear in source\n maps. 
For example -p 3 will drop 3 directories from file\n names and ensure they are relative paths.\n -o, --output Output file (default STDOUT).\n -b, --beautify Beautify output/specify output options. [string]\n -m, --mangle Mangle names/pass mangler options. [string]\n -r, --reserved Reserved names to exclude from mangling.\n -c, --compress Enable compressor/pass compressor options. Pass options\n like -c hoist_vars=false,if_return=false. Use -c with no\n argument to use the default compression options. [string]\n -d, --define Global definitions [string]\n --comments Preserve copyright comments in the output. By default this\n works like Google Closure, keeping JSDoc-style comments\n that contain \"@license\" or \"@preserve\". You can optionally\n pass one of the following arguments to this flag:\n - \"all\" to keep all comments\n - a valid JS regexp (needs to start with a slash) to keep\n only comments that match.\n Note that currently not *all* comments can be kept when\n compression is on, because of dead code removal or\n cascading statements into sequences. [string]\n --stats Display operations run time on STDERR. [boolean]\n --acorn Use Acorn for parsing. [boolean]\n --spidermonkey Assume input files are SpiderMonkey AST format (as JSON).\n [boolean]\n --self Build itself (UglifyJS2) as a library (implies\n --wrap=UglifyJS --export-all) [boolean]\n --wrap Embed everything in a big function, making the “exports”\n and “global” variables available. You need to pass an\n argument to this option to specify the name that your\n module will take when included in, say, a browser.\n [string]\n --export-all Only used when --wrap, this tells UglifyJS to add code to\n automatically export all globals. [boolean]\n --lint Display some scope warnings [boolean]\n -v, --verbose Verbose [boolean]\n -V, --version Print version number and exit. [boolean]\n\nSpecify `--output` (`-o`) to declare the output file. Otherwise the output\ngoes to STDOUT.\n\n## Source map options\n\nUglifyJS2 can generate a source map file, which is highly useful for\ndebugging your compressed JavaScript. To get a source map, pass\n`--source-map output.js.map` (full path to the file where you want the\nsource map dumped).\n\nAdditionally you might need `--source-map-root` to pass the URL where the\noriginal files can be found. In case you are passing full paths to input\nfiles to UglifyJS, you can use `--prefix` (`-p`) to specify the number of\ndirectories to drop from the path prefix when declaring files in the source\nmap.\n\nFor example:\n\n uglifyjs /home/doe/work/foo/src/js/file1.js \\\n /home/doe/work/foo/src/js/file2.js \\\n -o foo.min.js \\\n --source-map foo.min.js.map \\\n --source-map-root http://foo.com/src \\\n -p 5 -c -m\n\nThe above will compress and mangle `file1.js` and `file2.js`, will drop the\noutput in `foo.min.js` and the source map in `foo.min.js.map`. The source\nmapping will refer to `http://foo.com/src/js/file1.js` and\n`http://foo.com/src/js/file2.js` (in fact it will list `http://foo.com/src`\nas the source map root, and the original files as `js/file1.js` and\n`js/file2.js`).\n\n### Composed source map\n\nWhen you're compressing JS code that was output by a compiler such as\nCoffeeScript, mapping to the JS code won't be too helpful. Instead, you'd\nlike to map back to the original code (i.e. CoffeeScript). UglifyJS has an\noption to take an input source map. 
Assuming you have a mapping from\nCoffeeScript → compiled JS, UglifyJS can generate a map from CoffeeScript →\ncompressed JS by mapping every token in the compiled JS to its original\nlocation.\n\nTo use this feature you need to pass `--in-source-map\n/path/to/input/source.map`. Normally the input source map should also point\nto the file containing the generated JS, so if that's correct you can omit\ninput files from the command line.\n\n## Mangler options\n\nTo enable the mangler you need to pass `--mangle` (`-m`). The following\n(comma-separated) options are supported:\n\n- `sort` — to assign shorter names to most frequently used variables. This\n saves a few hundred bytes on jQuery before gzip, but the output is\n _bigger_ after gzip (and seems to happen for other libraries I tried it\n on) therefore it's not enabled by default.\n\n- `toplevel` — mangle names declared in the toplevel scope (disabled by\n default).\n\n- `eval` — mangle names visible in scopes where `eval` or `when` are used\n (disabled by default).\n\nWhen mangling is enabled but you want to prevent certain names from being\nmangled, you can declare those names with `--reserved` (`-r`) — pass a\ncomma-separated list of names. For example:\n\n uglifyjs ... -m -r '$,require,exports'\n\nto prevent the `require`, `exports` and `$` names from being changed.\n\n## Compressor options\n\nYou need to pass `--compress` (`-c`) to enable the compressor. Optionally\nyou can pass a comma-separated list of options. Options are in the form\n`foo=bar`, or just `foo` (the latter implies a boolean option that you want\nto set `true`; it's effectively a shortcut for `foo=true`).\n\n- `sequences` -- join consecutive simple statements using the comma operator\n- `properties` -- rewrite property access using the dot notation, for\n example `foo[\"bar\"] → foo.bar`\n- `dead_code` -- remove unreachable code\n- `drop_debugger` -- remove `debugger;` statements\n- `unsafe` (default: false) -- apply \"unsafe\" transformations (discussion below)\n- `conditionals` -- apply optimizations for `if`-s and conditional\n expressions\n- `comparisons` -- apply certain optimizations to binary nodes, for example:\n `!(a <= b) → a > b` (only when `unsafe`), attempts to negate binary nodes,\n e.g. `a = !b && !c && !d && !e → a=!(b||c||d||e)` etc.\n- `evaluate` -- attempt to evaluate constant expressions\n- `booleans` -- various optimizations for boolean context, for example `!!a\n ? b : c → a ? b : c`\n- `loops` -- optimizations for `do`, `while` and `for` loops when we can\n statically determine the condition\n- `unused` -- drop unreferenced functions and variables\n- `hoist_funs` -- hoist function declarations\n- `hoist_vars` (default: false) -- hoist `var` declarations (this is `false`\n by default because it seems to increase the size of the output in general)\n- `if_return` -- optimizations for if/return and if/continue\n- `join_vars` -- join consecutive `var` statements\n- `cascade` -- small optimization for sequences, transform `x, x` into `x`\n and `x = something(), x` into `x = something()`\n- `warnings` -- display warnings when dropping unreachable code or unused\n declarations etc.\n\n### The `unsafe` option\n\nIt enables some transformations that *might* break code logic in certain\ncontrived cases, but should be fine for most code. You might want to try it\non your own code, it should reduce the minified size. 
Here's what happens\nwhen this flag is on:\n\n- `new Array(1, 2, 3)` or `Array(1, 2, 3)` → `[1, 2, 3 ]`\n- `new Object()` → `{}`\n- `String(exp)` or `exp.toString()` → `\"\" + exp`\n- `new Object/RegExp/Function/Error/Array (...)` → we discard the `new`\n- `typeof foo == \"undefined\"` → `foo === void 0`\n- `void 0` → `\"undefined\"` (if there is a variable named \"undefined\" in\n scope; we do it because the variable name will be mangled, typically\n reduced to a single character).\n\n### Conditional compilation\n\nYou can use the `--define` (`-d`) switch in order to declare global\nvariables that UglifyJS will assume to be constants (unless defined in\nscope). For example if you pass `--define DEBUG=false` then, coupled with\ndead code removal UglifyJS will discard the following from the output:\n```javascript\nif (DEBUG) {\n\tconsole.log(\"debug stuff\");\n}\n```\n\nUglifyJS will warn about the condition being always false and about dropping\nunreachable code; for now there is no option to turn off only this specific\nwarning, you can pass `warnings=false` to turn off *all* warnings.\n\nAnother way of doing that is to declare your globals as constants in a\nseparate file and include it into the build. For example you can have a\n`build/defines.js` file with the following:\n```javascript\nconst DEBUG = false;\nconst PRODUCTION = true;\n// etc.\n```\n\nand build your code like this:\n\n uglifyjs build/defines.js js/foo.js js/bar.js... -c\n\nUglifyJS will notice the constants and, since they cannot be altered, it\nwill evaluate references to them to the value itself and drop unreachable\ncode as usual. The possible downside of this approach is that the build\nwill contain the `const` declarations.\n\n\n## Beautifier options\n\nThe code generator tries to output shortest code possible by default. In\ncase you want beautified output, pass `--beautify` (`-b`). 
Optionally you\ncan pass additional arguments that control the code output:\n\n- `beautify` (default `true`) -- whether to actually beautify the output.\n Passing `-b` will set this to true, but you might need to pass `-b` even\n when you want to generate minified code, in order to specify additional\n arguments, so you can use `-b beautify=false` to override it.\n- `indent-level` (default 4)\n- `indent-start` (default 0) -- prefix all lines by that many spaces\n- `quote-keys` (default `false`) -- pass `true` to quote all keys in literal\n objects\n- `space-colon` (default `true`) -- insert a space after the colon signs\n- `ascii-only` (default `false`) -- escape Unicode characters in strings and\n regexps\n- `inline-script` (default `false`) -- escape the slash in occurrences of\n `=0.4.7" - }, - "dependencies": { - "optimist": "~0.3", - "uglify-js": "~2.3" - }, - "optionalDependencies": { - "uglify-js": "~2.3" - }, - "devDependencies": { - "async": "~0.2.9", - "aws-sdk": "~1.5.0", - "benchmark": "~1.0", - "dustjs-linkedin": "~2.0.2", - "eco": "~1.1.0-rc-3", - "grunt": "~0.4.1", - "grunt-cli": "~0.1.10", - "grunt-contrib-clean": "~0.4.1", - "grunt-contrib-concat": "~0.3.0", - "grunt-contrib-connect": "~0.5.0", - "grunt-contrib-copy": "~0.4.1", - "grunt-contrib-jshint": "0.x", - "grunt-contrib-requirejs": "~0.4.1", - "grunt-contrib-uglify": "~0.2.2", - "grunt-contrib-watch": "~0.5.3", - "grunt-saucelabs": "~4.1.2", - "es6-module-packager": "0.x", - "jison": "~0.3.0", - "keen.io": "0.0.3", - "mocha": "*", - "mustache": "~0.7.2", - "semver": "~2.1.0", - "underscore": "~1.5.1" - }, - "main": "lib/index.js", - "bin": { - "handlebars": "bin/handlebars" - }, - "scripts": { - "test": "grunt" - }, - "readme": "[![Travis Build Status](https://travis-ci.org/wycats/handlebars.js.png?branch=master)](https://travis-ci.org/wycats/handlebars.js)\n[![Selenium Test Status](https://saucelabs.com/buildstatus/handlebars)](https://saucelabs.com/u/handlebars)\n\nHandlebars.js\n=============\n\nHandlebars.js is an extension to the [Mustache templating\nlanguage](http://mustache.github.com/) created by Chris Wanstrath.\nHandlebars.js and Mustache are both logicless templating languages that\nkeep the view and the code separated like we all know they should be.\n\nCheckout the official Handlebars docs site at\n[http://www.handlebarsjs.com](http://www.handlebarsjs.com).\n\nInstalling\n----------\nInstalling Handlebars is easy. Simply download the package [from the official site](http://handlebarsjs.com/) or the [bower repository][bower-repo] and add it to your web pages (you should usually use the most recent version).\n\nAlternatively, if you prefer having the latest version of handlebars from\nthe 'master' branch, passing builds of the 'master' branch are automatically\npublished to S3. You may download the latest passing master build by grabbing\na `handlebars-latest.js` file from the [builds page][builds-page]. When the\nbuild is published, it is also available as a `handlebars-gitSHA.js` file on\nthe builds page if you need a version to refer to others.\n`handlebars-runtime.js` builds are also available.\n\n**Note**: The S3 builds page is provided as a convenience for the community,\nbut you should not use it for hosting Handlebars in production.\n\nUsage\n-----\nIn general, the syntax of Handlebars.js templates is a superset\nof Mustache templates. 
For basic syntax, check out the [Mustache\nmanpage](http://mustache.github.com/mustache.5.html).\n\nOnce you have a template, use the `Handlebars.compile` method to compile\nthe template into a function. The generated function takes a context\nargument, which will be used to render the template.\n\n```js\nvar source = \"
<p>Hello, my name is {{name}}. I am from {{hometown}}. I have \" +\n \"{{kids.length}} kids:</p>\" +\n \"<ul>{{#kids}}<li>{{name}} is {{age}}</li>{{/kids}}</ul>
\";\nvar template = Handlebars.compile(source);\n\nvar data = { \"name\": \"Alan\", \"hometown\": \"Somewhere, TX\",\n \"kids\": [{\"name\": \"Jimmy\", \"age\": \"12\"}, {\"name\": \"Sally\", \"age\": \"4\"}]};\nvar result = template(data);\n\n// Would render:\n//
 <p>Hello, my name is Alan. I am from Somewhere, TX. I have 2 kids:</p>\n// <ul>\n//   <li>Jimmy is 12</li>\n//   <li>Sally is 4</li>\n// </ul>
\n```\n\n\nRegistering Helpers\n-------------------\n\nYou can register helpers that Handlebars will use when evaluating your\ntemplate. Here's an example, which assumes that your objects have a URL\nembedded in them, as well as the text for a link:\n\n```js\nHandlebars.registerHelper('link_to', function() {\n return \"\" + this.body + \"\";\n});\n\nvar context = { posts: [{url: \"/hello-world\", body: \"Hello World!\"}] };\nvar source = \"
<ul>{{#posts}}<li>{{{link_to}}}</li>{{/posts}}</ul>
\"\n\nvar template = Handlebars.compile(source);\ntemplate(context);\n\n// Would render:\n//\n// \n```\n\nHelpers take precedence over fields defined on the context. To access a field\nthat is masked by a helper, a path reference may be used. In the example above\na field named `link_to` on the `context` object would be referenced using:\n\n```\n{{./link_to}}\n```\n\nEscaping\n--------\n\nBy default, the `{{expression}}` syntax will escape its contents. This\nhelps to protect you against accidental XSS problems caused by malicious\ndata passed from the server as JSON.\n\nTo explicitly *not* escape the contents, use the triple-mustache\n(`{{{}}}`). You have seen this used in the above example.\n\n\nDifferences Between Handlebars.js and Mustache\n----------------------------------------------\nHandlebars.js adds a couple of additional features to make writing\ntemplates easier and also changes a tiny detail of how partials work.\n\n### Paths\n\nHandlebars.js supports an extended expression syntax that we call paths.\nPaths are made up of typical expressions and . characters. Expressions\nallow you to not only display data from the current context, but to\ndisplay data from contexts that are descendants and ancestors of the\ncurrent context.\n\nTo display data from descendant contexts, use the `.` character. So, for\nexample, if your data were structured like:\n\n```js\nvar data = {\"person\": { \"name\": \"Alan\" }, \"company\": {\"name\": \"Rad, Inc.\" } };\n```\n\nYou could display the person's name from the top-level context with the\nfollowing expression:\n\n```\n{{person.name}}\n```\n\nYou can backtrack using `../`. For example, if you've already traversed\ninto the person object you could still display the company's name with\nan expression like `{{../company.name}}`, so:\n\n```\n{{#person}}{{name}} - {{../company.name}}{{/person}}\n```\n\nwould render:\n\n```\nAlan - Rad, Inc.\n```\n\n### Strings\n\nWhen calling a helper, you can pass paths or Strings as parameters. For\ninstance:\n\n```js\nHandlebars.registerHelper('link_to', function(title, options) {\n return \"\" + title + \"!\"\n});\n\nvar context = { posts: [{url: \"/hello-world\", body: \"Hello World!\"}] };\nvar source = '
<ul>{{#posts}}<li>{{{link_to \"Post\"}}}</li>{{/posts}}</ul>
'\n\nvar template = Handlebars.compile(source);\ntemplate(context);\n\n// Would render:\n//\n// \n```\n\nWhen you pass a String as a parameter to a helper, the literal String\ngets passed to the helper function.\n\n\n### Block Helpers\n\nHandlebars.js also adds the ability to define block helpers. Block\nhelpers are functions that can be called from anywhere in the template.\nHere's an example:\n\n```js\nvar source = \"
<ul>{{#people}}<li>{{#link}}{{name}}{{/link}}</li>{{/people}}</ul>
\";\nHandlebars.registerHelper('link', function(options) {\n return '' + options.fn(this) + '';\n});\nvar template = Handlebars.compile(source);\n\nvar data = { \"people\": [\n { \"name\": \"Alan\", \"id\": 1 },\n { \"name\": \"Yehuda\", \"id\": 2 }\n ]};\ntemplate(data);\n\n// Should render:\n// \n```\n\nWhenever the block helper is called it is given one or more parameters,\nany arguments that are passed in the helper in the call and an `options`\nobject containing the `fn` function which executes the block's child.\nThe block's current context may be accessed through `this`.\n\nBlock helpers have the same syntax as mustache sections but should not be\nconfused with one another. Sections are akin to an implicit `each` or\n`with` statement depending on the input data and helpers are explicit\npieces of code that are free to implement whatever behavior they like.\nThe [mustache spec](http://mustache.github.io/mustache.5.html)\ndefines the exact behavior of sections. In the case of name conflicts,\nhelpers are given priority.\n\n### Partials\n\nYou can register additional templates as partials, which will be used by\nHandlebars when it encounters a partial (`{{> partialName}}`). Partials\ncan either be String templates or compiled template functions. Here's an\nexample:\n\n```js\nvar source = \"
<ul>{{#people}}<li>{{> link}}</li>{{/people}}</ul>
\";\n\nHandlebars.registerPartial('link', '{{name}}')\nvar template = Handlebars.compile(source);\n\nvar data = { \"people\": [\n { \"name\": \"Alan\", \"id\": 1 },\n { \"name\": \"Yehuda\", \"id\": 2 }\n ]};\n\ntemplate(data);\n\n// Should render:\n// \n```\n\n### Comments\n\nYou can add comments to your templates with the following syntax:\n\n```js\n{{! This is a comment }}\n```\n\nYou can also use real html comments if you want them to end up in the output.\n\n```html\n
<div>\n {{! This comment will not end up in the output }}\n <!-- This comment will show up in the output -->\n</div>
\n```\n\n\nPrecompiling Templates\n----------------------\n\nHandlebars allows templates to be precompiled and included as javascript\ncode rather than the handlebars template allowing for faster startup time.\n\n### Installation\nThe precompiler script may be installed via npm using the `npm install -g handlebars`\ncommand.\n\n### Usage\n\n
<pre>\nPrecompile handlebar templates.\nUsage: handlebars template...\n\nOptions:\n  -a, --amd            Create an AMD format function (allows loading with RequireJS)          [boolean]\n  -f, --output         Output File                                                            [string]\n  -k, --known          Known helpers                                                          [string]\n  -o, --knownOnly      Known helpers only                                                     [boolean]\n  -m, --min            Minimize output                                                        [boolean]\n  -s, --simple         Output template function only.                                         [boolean]\n  -r, --root           Template root. Base value that will be stripped from template names.   [string]\n  -c, --commonjs       Exports CommonJS style, path to Handlebars module                      [string]\n  -h, --handlebarPath  Path to handlebar.js (only valid for amd-style)                        [string]\n  -n, --namespace      Template namespace                                                     [string]\n  -p, --partial        Compiling a partial template                                           [boolean]\n  -d, --data           Include data when compiling                                            [boolean]\n  -e, --extension      Template extension.                                                    [string]\n  -b, --bom            Removes the BOM (Byte Order Mark) from the beginning of the templates. [boolean]\n</pre>
\n\nIf using the precompiler's normal mode, the resulting templates will be\nstored to the `Handlebars.templates` object using the relative template\nname sans the extension. These templates may be executed in the same\nmanner as templates.\n\nIf using the simple mode the precompiler will generate a single\njavascript method. To execute this method it must be passed to the using\nthe `Handlebars.template` method and the resulting object may be as\nnormal.\n\n### Optimizations\n\n- Rather than using the full _handlebars.js_ library, implementations that\n do not need to compile templates at runtime may include _handlebars.runtime.js_\n whose min+gzip size is approximately 1k.\n- If a helper is known to exist in the target environment they may be defined\n using the `--known name` argument may be used to optimize accesses to these\n helpers for size and speed.\n- When all helpers are known in advance the `--knownOnly` argument may be used\n to optimize all block helper references.\n- Implementations that do not use `@data` variables can improve performance of\n iteration centric templates by specifying `{data: false}` in the compiler options.\n\nSupported Environments\n----------------------\n\nHandlebars has been designed to work in any ECMAScript 3 environment. This includes\n\n- Node.js\n- Chrome\n- Firefox\n- Safari 5+\n- Opera 11+\n- IE 6+\n\nOlder versions and other runtimes are likely to work but have not been formally\ntested.\n\n[![Selenium Test Status](https://saucelabs.com/browser-matrix/handlebars.svg)](https://saucelabs.com/u/handlebars)\n\nPerformance\n-----------\n\nIn a rough performance test, precompiled Handlebars.js templates (in\nthe original version of Handlebars.js) rendered in about half the\ntime of Mustache templates. It would be a shame if it were any other\nway, since they were precompiled, but the difference in architecture\ndoes have some big performance advantages. Justin Marney, a.k.a.\n[gotascii](http://github.com/gotascii), confirmed that with an\n[independent test](http://sorescode.com/2010/09/12/benchmarks.html). The\nrewritten Handlebars (current version) is faster than the old version,\nand we will have some benchmarks in the near future.\n\n\nBuilding\n--------\n\nTo build handlebars, just run `grunt build`, and the build will output to the `dist` directory.\n\n\nUpgrading\n---------\n\nSee [release-notes.md](https://github.com/wycats/handlebars.js/blob/master/release-notes.md) for upgrade notes.\n\nKnown Issues\n------------\n* Handlebars.js can be cryptic when there's an error while rendering.\n* Using a variable, helper, or partial named `class` causes errors in IE browsers. 
(Instead, use `className`)\n\nHandlebars in the Wild\n----------------------\n\n* [Assemble](http://assemble.io), by [@jonschlinkert](https://github.com/jonschlinkert)\n and [@doowb](https://github.com/doowb), is a static site generator that uses Handlebars.js\n as its template engine.\n* [CoSchedule](http://coschedule.com) An editorial calendar for WordPress that uses Handlebars.js\n* [Ember.js](http://www.emberjs.com) makes Handlebars.js the primary way to\n structure your views, also with automatic data binding support.\n* [Ghost](https://ghost.org/) Just a blogging platform.\n* [handlebars_assets](http://github.com/leshill/handlebars_assets): A Rails Asset Pipeline gem\n from Les Hill (@leshill).\n* [handlebars-helpers](https://github.com/assemble/handlebars-helpers) is an extensive library\n with 100+ handlebars helpers.\n* [hbs](http://github.com/donpark/hbs): An Express.js view engine adapter for Handlebars.js,\n from Don Park.\n* [jblotus](http://github.com/jblotus) created [http://tryhandlebarsjs.com](http://tryhandlebarsjs.com)\n for anyone who would like to try out Handlebars.js in their browser.\n* [jQuery plugin](http://71104.github.io/jquery-handlebars/): allows you to use\n Handlebars.js with [jQuery](http://jquery.com/).\n* [Lumbar](http://walmartlabs.github.io/lumbar) provides easy module-based template management for\n handlebars projects.\n* [sammy.js](http://github.com/quirkey/sammy) by Aaron Quint, a.k.a. quirkey,\n supports Handlebars.js as one of its template plugins.\n* [SproutCore](http://www.sproutcore.com) uses Handlebars.js as its main\n templating engine, extending it with automatic data binding support.\n* [YUI](http://yuilibrary.com/yui/docs/handlebars/) implements a port of handlebars\n* [Swag](https://github.com/elving/swag) by [@elving](https://github.com/elving) is a growing collection of helpers for handlebars.js. Give your handlebars.js templates some swag son!\n* [DOMBars](https://github.com/blakeembrey/dombars) is a DOM-based templating engine built on the Handlebars parser and runtime\n\nExternal Resources\n------------------\n\n* [Gist about Synchronous and asynchronous loading of external handlebars templates](https://gist.github.com/2287070)\n\nHave a project using Handlebars? Send us a [pull request][pull-request]!\n\nHelping Out\n-----------\n\nTo build Handlebars.js you'll need a few things installed.\n\n* Node.js\n* [Grunt](http://gruntjs.com/getting-started)\n\nProject dependencies may be installed via `npm install`.\n\nTo build Handlebars.js from scratch, you'll want to run `grunt`\nin the root of the project. That will build Handlebars and output the\nresults to the dist/ folder. To re-run tests, run `grunt test` or `npm test`.\nYou can also run our set of benchmarks with `grunt bench`.\n\nThe `grunt dev` implements watching for tests and allows for in browser testing at `http://localhost:9999/spec/`.\n\nIf you notice any problems, please report them to the GitHub issue tracker at\n[http://github.com/wycats/handlebars.js/issues](http://github.com/wycats/handlebars.js/issues).\nFeel free to contact commondream or wycats through GitHub with any other\nquestions or feature requests. To submit changes fork the project and\nsend a pull request.\n\n### Ember testing\n\nThe current ember distribution should be tested as part of the handlebars release process. 
This requires building the `handlebars-source` gem locally and then executing the ember test script.\n\n```sh\ngrunt build release\nexport HANDLEBARS_PATH=`pwd`\n\ncd $emberRepoDir\nbundle exec rake clean\nbundle exec rake test\n```\n\n### Releasing\n\nHandlebars utilizes the [release yeoman generator][generator-release] to perform most release tasks.\n\nA full release may be completed with the following:\n\n```\nyo release:notes patch\nyo release:release patch\nnpm publish\nyo release:publish cdnjs handlebars.js dist/cdnjs/\nyo release:publish components handlebars.js dist/components/\n\ncd dist/components/\ngem build handlebars-source.gemspec\ngem push handlebars-source-*.gem\n```\n\nAfter this point the handlebars site needs to be updated to point to the new version numbers.\n\nLicense\n-------\nHandlebars.js is released under the MIT license.\n\n[bower-repo]: https://github.com/components/handlebars.js\n[builds-page]: http://builds.handlebarsjs.com.s3.amazonaws.com/bucket-listing.html?sort=lastmod&sortdir=desc\n[generator-release]: https://github.com/walmartlabs/generator-release\n[pull-request]: https://github.com/wycats/handlebars.js/pull/new/master\n", - "bugs": { - "url": "https://github.com/wycats/handlebars.js/issues" - }, - "_id": "handlebars@1.3.0", - "_from": "handlebars@1.x" -} diff --git a/node_modules/express3-handlebars/node_modules/semver/LICENSE b/node_modules/express3-handlebars/node_modules/semver/LICENSE deleted file mode 100644 index 0c44ae716..000000000 --- a/node_modules/express3-handlebars/node_modules/semver/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
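A minimal sketch of consuming precompiled templates in the way the Handlebars README above describes for the precompiler's normal mode; the template name `home`, the bundle path `public/js/templates.js`, and the container id `app` are illustrative assumptions, not files or ids introduced by this patch.

```js
// Hypothetical precompile step (normal mode; paths are assumptions):
//   handlebars views/home.handlebars -r views -f public/js/templates.js
//
// In a page that loads handlebars.runtime.js followed by templates.js, the
// precompiled template is stored on Handlebars.templates, keyed by the
// relative template name without its extension.
var template = Handlebars.templates['home'];          // compiled template function
var html = template({ title: 'Example App: Home' });  // render with a context object
document.getElementById('app').innerHTML = html;      // assumed container element
```

With the precompiler's simple mode, the generated function would instead be passed through `Handlebars.template()` before being called in the same way.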
diff --git a/node_modules/express3-handlebars/node_modules/semver/package.json b/node_modules/express3-handlebars/node_modules/semver/package.json deleted file mode 100644 index 8b28a9af9..000000000 --- a/node_modules/express3-handlebars/node_modules/semver/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "semver", - "version": "2.2.1", - "description": "The semantic version parser used by npm.", - "main": "semver.js", - "browser": "semver.browser.js", - "min": "semver.min.js", - "scripts": { - "test": "tap test/*.js", - "prepublish": "make" - }, - "devDependencies": { - "tap": "0.x >=0.0.4", - "uglify-js": "~2.3.6" - }, - "license": "BSD", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-semver.git" - }, - "bin": { - "semver": "./bin/semver" - }, - "readme": "semver(1) -- The semantic versioner for npm\n===========================================\n\n## Usage\n\n $ npm install semver\n\n semver.valid('1.2.3') // '1.2.3'\n semver.valid('a.b.c') // null\n semver.clean(' =v1.2.3 ') // '1.2.3'\n semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true\n semver.gt('1.2.3', '9.8.7') // false\n semver.lt('1.2.3', '9.8.7') // true\n\nAs a command-line utility:\n\n $ semver -h\n\n Usage: semver [ [...]] [-r | -i | -d ]\n Test if version(s) satisfy the supplied range(s), and sort them.\n\n Multiple versions or ranges may be supplied, unless increment\n or decrement options are specified. In that case, only a single\n version may be used, and it is incremented by the specified level\n\n Program exits successfully if any valid version satisfies\n all supplied ranges, and prints all satisfying versions.\n\n If no versions are valid, or ranges are not satisfied,\n then exits failure.\n\n Versions are printed in ascending order, so supplying\n multiple versions to the utility will just sort them.\n\n## Versions\n\nA \"version\" is described by the v2.0.0 specification found at\n.\n\nA leading `\"=\"` or `\"v\"` character is stripped off and ignored.\n\n## Ranges\n\nThe following range styles are supported:\n\n* `1.2.3` A specific version. When nothing else will do. Note that\n build metadata is still ignored, so `1.2.3+build2012` will satisfy\n this range.\n* `>1.2.3` Greater than a specific version.\n* `<1.2.3` Less than a specific version. If there is no prerelease\n tag on the version range, then no prerelease version will be allowed\n either, even though these are technically \"less than\".\n* `>=1.2.3` Greater than or equal to. Note that prerelease versions\n are NOT equal to their \"normal\" equivalents, so `1.2.3-beta` will\n not satisfy this range, but `2.3.0-beta` will.\n* `<=1.2.3` Less than or equal to. In this case, prerelease versions\n ARE allowed, so `1.2.3-beta` would satisfy.\n* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`\n* `~1.2.3` := `>=1.2.3-0 <1.3.0-0` \"Reasonably close to 1.2.3\". When\n using tilde operators, prerelease versions are supported as well,\n but a prerelease of the next significant digit will NOT be\n satisfactory, so `1.3.0-beta` will not satisfy `~1.2.3`.\n* `^1.2.3` := `>=1.2.3-0 <2.0.0-0` \"Compatible with 1.2.3\". When\n using caret operators, anything from the specified version (including\n prerelease) will be supported up to, but not including, the next\n major version (or its prereleases). `1.5.1` will satisfy `^1.2.3`,\n while `1.2.2` and `2.0.0-beta` will not.\n* `^0.1.3` := `>=0.1.3-0 <0.2.0-0` \"Compatible with 0.1.3\". 
0.x.x versions are\n special: the first non-zero component indicates potentially breaking changes,\n meaning the caret operator matches any version with the same first non-zero\n component starting at the specified version.\n* `^0.0.2` := `=0.0.2` \"Only the version 0.0.2 is considered compatible\"\n* `~1.2` := `>=1.2.0-0 <1.3.0-0` \"Any version starting with 1.2\"\n* `^1.2` := `>=1.2.0-0 <2.0.0-0` \"Any version compatible with 1.2\"\n* `1.2.x` := `>=1.2.0-0 <1.3.0-0` \"Any version starting with 1.2\"\n* `~1` := `>=1.0.0-0 <2.0.0-0` \"Any version starting with 1\"\n* `^1` := `>=1.0.0-0 <2.0.0-0` \"Any version compatible with 1\"\n* `1.x` := `>=1.0.0-0 <2.0.0-0` \"Any version starting with 1\"\n\n\nRanges can be joined with either a space (which implies \"and\") or a\n`||` (which implies \"or\").\n\n## Functions\n\nAll methods and classes take a final `loose` boolean argument that, if\ntrue, will be more forgiving about not-quite-valid semver strings.\nThe resulting output will always be 100% strict, of course.\n\nStrict-mode Comparators and Ranges will be strict about the SemVer\nstrings that they parse.\n\n* valid(v): Return the parsed version, or null if it's not valid.\n* inc(v, release): Return the version incremented by the release type\n (major, minor, patch, or prerelease), or null if it's not valid.\n\n### Comparison\n\n* gt(v1, v2): `v1 > v2`\n* gte(v1, v2): `v1 >= v2`\n* lt(v1, v2): `v1 < v2`\n* lte(v1, v2): `v1 <= v2`\n* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent,\n even if they're not the exact same string. You already know how to\n compare strings.\n* neq(v1, v2): `v1 != v2` The opposite of eq.\n* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call\n the corresponding function above. `\"===\"` and `\"!==\"` do simple\n string comparison, but are included for completeness. Throws if an\n invalid comparison string is provided.\n* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if\n v2 is greater. Sorts in ascending order if passed to Array.sort().\n* rcompare(v1, v2): The reverse of compare. Sorts an array of versions\n in descending order when passed to Array.sort().\n\n\n### Ranges\n\n* validRange(range): Return the valid range or null if it's not valid\n* satisfies(version, range): Return true if the version satisfies the\n range.\n* maxSatisfying(versions, range): Return the highest version in the list\n that satisfies the range, or null if none of them do.\n* gtr(version, range): Return true if version is greater than all the\n versions possible in the range.\n* ltr(version, range): Return true if version is less than all the\n versions possible in the range.\n* outside(version, range, hilo): Return true if the version is outside\n the bounds of the range in either the high or low direction. The\n `hilo` argument must be either the string `'>'` or `'<'`. (This is\n the function called by `gtr` and `ltr`.)\n\nNote that, since ranges may be non-contiguous, a version might not be\ngreater than a range, less than a range, *or* satisfy a range! 
For\nexample, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`\nuntil `2.0.0`, so the version `1.2.10` would not be greater than the\nrange (because 2.0.1 satisfies, which is higher), nor less than the\nrange (since 1.2.8 satisfies, which is lower), and it also does not\nsatisfy the range.\n\nIf you want to know if a version satisfies or does not satisfy a\nrange, use the `satisfies(version, range)` function.\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/isaacs/node-semver/issues" - }, - "homepage": "https://github.com/isaacs/node-semver", - "_id": "semver@2.2.1", - "_from": "semver@2.x" -} diff --git a/node_modules/express3-handlebars/node_modules/semver/semver.min.js b/node_modules/express3-handlebars/node_modules/semver/semver.min.js deleted file mode 100644 index c2164c33e..000000000 --- a/node_modules/express3-handlebars/node_modules/semver/semver.min.js +++ /dev/null @@ -1 +0,0 @@ -(function(e){if(typeof module==="object"&&module.exports===e)e=module.exports=H;e.SEMVER_SPEC_VERSION="2.0.0";var r=e.re=[];var t=e.src=[];var n=0;var i=n++;t[i]="0|[1-9]\\d*";var s=n++;t[s]="[0-9]+";var o=n++;t[o]="\\d*[a-zA-Z-][a-zA-Z0-9-]*";var a=n++;t[a]="("+t[i]+")\\."+"("+t[i]+")\\."+"("+t[i]+")";var f=n++;t[f]="("+t[s]+")\\."+"("+t[s]+")\\."+"("+t[s]+")";var u=n++;t[u]="(?:"+t[i]+"|"+t[o]+")";var l=n++;t[l]="(?:"+t[s]+"|"+t[o]+")";var c=n++;t[c]="(?:-("+t[u]+"(?:\\."+t[u]+")*))";var p=n++;t[p]="(?:-?("+t[l]+"(?:\\."+t[l]+")*))";var h=n++;t[h]="[0-9A-Za-z-]+";var v=n++;t[v]="(?:\\+("+t[h]+"(?:\\."+t[h]+")*))";var m=n++;var g="v?"+t[a]+t[c]+"?"+t[v]+"?";t[m]="^"+g+"$";var w="[v=\\s]*"+t[f]+t[p]+"?"+t[v]+"?";var d=n++;t[d]="^"+w+"$";var y=n++;t[y]="((?:<|>)?=?)";var $=n++;t[$]=t[s]+"|x|X|\\*";var j=n++;t[j]=t[i]+"|x|X|\\*";var b=n++;t[b]="[v=\\s]*("+t[j]+")"+"(?:\\.("+t[j]+")"+"(?:\\.("+t[j]+")"+"(?:("+t[c]+")"+")?)?)?";var E=n++;t[E]="[v=\\s]*("+t[$]+")"+"(?:\\.("+t[$]+")"+"(?:\\.("+t[$]+")"+"(?:("+t[p]+")"+")?)?)?";var k=n++;t[k]="^"+t[y]+"\\s*"+t[b]+"$";var x=n++;t[x]="^"+t[y]+"\\s*"+t[E]+"$";var R=n++;t[R]="(?:~>?)";var S=n++;t[S]="(\\s*)"+t[R]+"\\s+";r[S]=new RegExp(t[S],"g");var V="$1~";var I=n++;t[I]="^"+t[R]+t[b]+"$";var A=n++;t[A]="^"+t[R]+t[E]+"$";var C=n++;t[C]="(?:\\^)";var T=n++;t[T]="(\\s*)"+t[C]+"\\s+";r[T]=new RegExp(t[T],"g");var M="$1^";var z=n++;t[z]="^"+t[C]+t[b]+"$";var P=n++;t[P]="^"+t[C]+t[E]+"$";var Z=n++;t[Z]="^"+t[y]+"\\s*("+w+")$|^$";var q=n++;t[q]="^"+t[y]+"\\s*("+g+")$|^$";var L=n++;t[L]="(\\s*)"+t[y]+"\\s*("+w+"|"+t[b]+")";r[L]=new RegExp(t[L],"g");var X="$1$2$3";var _=n++;t[_]="^\\s*("+t[b]+")"+"\\s+-\\s+"+"("+t[b]+")"+"\\s*$";var N=n++;t[N]="^\\s*("+t[E]+")"+"\\s+-\\s+"+"("+t[E]+")"+"\\s*$";var O=n++;t[O]="(<|>)?=?\\s*\\*";for(var B=0;B'};H.prototype.toString=function(){return this.version};H.prototype.compare=function(e){if(!(e instanceof H))e=new H(e,this.loose);return this.compareMain(e)||this.comparePre(e)};H.prototype.compareMain=function(e){if(!(e instanceof H))e=new H(e,this.loose);return Q(this.major,e.major)||Q(this.minor,e.minor)||Q(this.patch,e.patch)};H.prototype.comparePre=function(e){if(!(e instanceof H))e=new H(e,this.loose);if(this.prerelease.length&&!e.prerelease.length)return-1;else if(!this.prerelease.length&&e.prerelease.length)return 1;else if(!this.prerelease.lenth&&!e.prerelease.length)return 0;var r=0;do{var t=this.prerelease[r];var n=e.prerelease[r];if(t===undefined&&n===undefined)return 0;else if(n===undefined)return 1;else if(t===undefined)return-1;else if(t===n)continue;else return 
Q(t,n)}while(++r)};H.prototype.inc=function(e){switch(e){case"major":this.major++;this.minor=-1;case"minor":this.minor++;this.patch=-1;case"patch":this.patch++;this.prerelease=[];break;case"prerelease":if(this.prerelease.length===0)this.prerelease=[0];else{var r=this.prerelease.length;while(--r>=0){if(typeof this.prerelease[r]==="number"){this.prerelease[r]++;r=-2}}if(r===-1)this.prerelease.push(0)}break;default:throw new Error("invalid increment argument: "+e)}this.format();return this};e.inc=J;function J(e,r,t){try{return new H(e,t).inc(r).version}catch(n){return null}}e.compareIdentifiers=Q;var K=/^[0-9]+$/;function Q(e,r){var t=K.test(e);var n=K.test(r);if(t&&n){e=+e;r=+r}return t&&!n?-1:n&&!t?1:er?1:0}e.rcompareIdentifiers=U;function U(e,r){return Q(r,e)}e.compare=W;function W(e,r,t){return new H(e,t).compare(r)}e.compareLoose=Y;function Y(e,r){return W(e,r,true)}e.rcompare=er;function er(e,r,t){return W(r,e,t)}e.sort=rr;function rr(r,t){return r.sort(function(r,n){return e.compare(r,n,t)})}e.rsort=tr;function tr(r,t){return r.sort(function(r,n){return e.rcompare(r,n,t)})}e.gt=nr;function nr(e,r,t){return W(e,r,t)>0}e.lt=ir;function ir(e,r,t){return W(e,r,t)<0}e.eq=sr;function sr(e,r,t){return W(e,r,t)===0}e.neq=or;function or(e,r,t){return W(e,r,t)!==0}e.gte=ar;function ar(e,r,t){return W(e,r,t)>=0}e.lte=fr;function fr(e,r,t){return W(e,r,t)<=0}e.cmp=ur;function ur(e,r,t,n){var i;switch(r){case"===":i=e===t;break;case"!==":i=e!==t;break;case"":case"=":case"==":i=sr(e,t,n);break;case"!=":i=or(e,t,n);break;case">":i=nr(e,t,n);break;case">=":i=ar(e,t,n);break;case"<":i=ir(e,t,n);break;case"<=":i=fr(e,t,n);break;default:throw new TypeError("Invalid operator: "+r)}return i}e.Comparator=lr;function lr(e,r){if(e instanceof lr){if(e.loose===r)return e;else e=e.value}if(!(this instanceof lr))return new lr(e,r);this.loose=r;this.parse(e);if(this.semver===cr)this.value="";else this.value=this.operator+this.semver.version}var cr={};lr.prototype.parse=function(e){var t=this.loose?r[Z]:r[q];var n=e.match(t);if(!n)throw new TypeError("Invalid comparator: "+e);this.operator=n[1];if(!n[2])this.semver=cr;else{this.semver=new H(n[2],this.loose);if(this.operator==="<"&&!this.semver.prerelease.length){this.semver.prerelease=["0"];this.semver.format()}}};lr.prototype.inspect=function(){return''};lr.prototype.toString=function(){return this.value};lr.prototype.test=function(e){return this.semver===cr?true:ur(e,this.operator,this.semver,this.loose)};e.Range=pr;function pr(e,r){if(e instanceof pr&&e.loose===r)return e;if(!(this instanceof pr))return new pr(e,r);this.loose=r;this.raw=e;this.set=e.split(/\s*\|\|\s*/).map(function(e){return this.parseRange(e.trim())},this).filter(function(e){return e.length});if(!this.set.length){throw new TypeError("Invalid SemVer Range: "+e)}this.format()}pr.prototype.inspect=function(){return''};pr.prototype.format=function(){this.range=this.set.map(function(e){return e.join(" ").trim()}).join("||").trim();return this.range};pr.prototype.toString=function(){return this.range};pr.prototype.parseRange=function(e){var t=this.loose;e=e.trim();var n=t?r[N]:r[_];e=e.replace(n,Er);e=e.replace(r[L],X);e=e.replace(r[S],V);e=e.replace(r[T],M);e=e.split(/\s+/).join(" ");var i=t?r[Z]:r[q];var s=e.split(" ").map(function(e){return vr(e,t)}).join(" ").split(/\s+/);if(this.loose){s=s.filter(function(e){return!!e.match(i)})}s=s.map(function(e){return new lr(e,t)});return s};e.toComparators=hr;function hr(e,r){return new pr(e,r).set.map(function(e){return e.map(function(e){return 
e.value}).join(" ").trim().split(" ")})}function vr(e,r){e=dr(e,r);e=gr(e,r);e=$r(e,r);e=br(e,r);return e}function mr(e){return!e||e.toLowerCase()==="x"||e==="*"}function gr(e,r){return e.trim().split(/\s+/).map(function(e){return wr(e,r)}).join(" ")}function wr(e,t){var n=t?r[A]:r[I];return e.replace(n,function(e,r,t,n,i){var s;if(mr(r))s="";else if(mr(t))s=">="+r+".0.0-0 <"+(+r+1)+".0.0-0";else if(mr(n))s=">="+r+"."+t+".0-0 <"+r+"."+(+t+1)+".0-0";else if(i){if(i.charAt(0)!=="-")i="-"+i;s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+"-0"+" <"+r+"."+(+t+1)+".0-0";return s})}function dr(e,r){return e.trim().split(/\s+/).map(function(e){return yr(e,r)}).join(" ")}function yr(e,t){var n=t?r[P]:r[z];return e.replace(n,function(e,r,t,n,i){var s;if(mr(r))s="";else if(mr(t))s=">="+r+".0.0-0 <"+(+r+1)+".0.0-0";else if(mr(n)){if(r==="0")s=">="+r+"."+t+".0-0 <"+r+"."+(+t+1)+".0-0";else s=">="+r+"."+t+".0-0 <"+(+r+1)+".0.0-0"}else if(i){if(i.charAt(0)!=="-")i="-"+i;if(r==="0"){if(t==="0")s="="+r+"."+t+"."+n+i;else s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+i+" <"+(+r+1)+".0.0-0"}else{if(r==="0"){if(t==="0")s="="+r+"."+t+"."+n;else s=">="+r+"."+t+"."+n+"-0"+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+"-0"+" <"+(+r+1)+".0.0-0"}return s})}function $r(e,r){return e.split(/\s+/).map(function(e){return jr(e,r)}).join(" ")}function jr(e,t){e=e.trim();var n=t?r[x]:r[k];return e.replace(n,function(e,r,t,n,i,s){var o=mr(t);var a=o||mr(n);var f=a||mr(i);var u=f;if(r==="="&&u)r="";if(r&&u){if(o)t=0;if(a)n=0;if(f)i=0;if(r===">"){r=">=";if(o){}else if(a){t=+t+1;n=0;i=0}else if(f){n=+n+1;i=0}}e=r+t+"."+n+"."+i+"-0"}else if(o){e="*"}else if(a){e=">="+t+".0.0-0 <"+(+t+1)+".0.0-0"}else if(f){e=">="+t+"."+n+".0-0 <"+t+"."+(+n+1)+".0-0"}return e})}function br(e,t){return e.trim().replace(r[O],"")}function Er(e,r,t,n,i,s,o,a,f,u,l,c,p){if(mr(t))r="";else if(mr(n))r=">="+t+".0.0-0";else if(mr(i))r=">="+t+"."+n+".0-0";else r=">="+r;if(mr(f))a="";else if(mr(u))a="<"+(+f+1)+".0.0-0";else if(mr(l))a="<"+f+"."+(+u+1)+".0-0";else if(c)a="<="+f+"."+u+"."+l+"-"+c;else a="<="+a;return(r+" "+a).trim()}pr.prototype.test=function(e){if(!e)return false;for(var r=0;r",t)}e.outside=Ar;function Ar(e,r,t,n){e=new H(e,n);r=new pr(r,n);var i,s,o,a,f;switch(t){case">":i=nr;s=fr;o=ir;a=">";f=">=";break;case"<":i=ir;s=ar;o=nr;a="<";f="<=";break;default:throw new TypeError('Must provide a hilo val of "<" or ">"')}if(xr(e,r,n)){return false}for(var u=0;u=0.8" + }, + "dependencies": { + "async": "~0.2", + "glob": "3.x", + "handlebars": "1.x", + "semver": "2.x" + }, + "devDependencies": { + "express": "3.x" + }, + "main": "index.js", + "directories": { + "lib": "./lib" + }, + "readme": "Express3 Handlebars\n===================\n\nA [Handlebars][] view engine for [Express][] which doesn't suck.\n\n[![Dependency Status](https://david-dm.org/ericf/express3-handlebars.png)][status]\n\n\n[Express]: https://github.com/visionmedia/express\n[Handlebars]: https://github.com/wycats/handlebars.js\n[status]: https://david-dm.org/ericf/express3-handlebars\n\n\nGoals & Design\n--------------\n\nI created this project out of frustration with the existing Handlebars view\nengines for Express. 
As of version 3.x, Express got out of the business of being\na generic view engine — this was a great decision — leaving developers to\nimplement the concepts of layouts, partials, and doing file I/O for their\ntemplate engines of choice.\n\n### Goals and Features\n\nAfter building a half-dozen Express apps, I developed requirements and opinions\nabout what a Handlebars view engine should provide and how it should be\nimplemented. The following is that list:\n\n* Add back the concept of \"layout\", which was removed in Express 3.x.\n\n* Add back the concept of \"partials\" via Handlebars' partials mechanism.\n\n* Support a directory of partials; e.g., `{{> foo/bar}}` which exists on the\n file system at `views/partials/foo/bar.handlebars` by default.\n\n* Smart file system I/O and template caching. When in development, templates are\n always loaded from disk. In production, raw files and compiled templates are\n cached, including partials.\n\n* All async and non-blocking. File system I/O is slow and servers should not be\n blocked from handling requests while reading from disk. I/O queuing is used to\n avoid doing unnecessary work.\n\n* Ability to expose precompiled templates and partials to the client, enabling\n template sharing and reuse.\n\n* Ability to use a different Handlebars module/implementation other than the\n Handlebars npm package.\n\n### Package Design\n\nThis package was designed to work great for both the simple and complex use\ncases. I _intentionally_ made sure the full implementation is exposed and is\neasily overrideable.\n\nThe package exports a function which can be invoked with no arguments or with a\n`config` object and it will return a function (closed over sane defaults) which\ncan be registered with an Express app. It's an engine factory function.\n\nThis exported engine factory has two properties which expose the underlying\nimplementation:\n\n* `ExpressHandlebars()`: The constructor function which holds the internal\n implementation on its `prototype`. This produces instance objects which store\n their configuration, `compiled` and `precompiled` templates, and expose an\n `engine()` function which can be registered with an Express app.\n\n* `create()`: A convenience factory function for creating `ExpressHandlebars`\n instances.\n\nAn instance-based approach is used so that multiple `ExpressHandlebars`\ninstances can be created with their own configuration, templates, partials, and\nhelpers.\n\n\nInstallation\n------------\n\nInstall using npm:\n\n```shell\n$ npm install express3-handlebars\n```\n\n\nUsage\n-----\n\nThis view engine uses sane defaults that leverage the \"Express-way\" of\nstructuring an app's views. This makes it trivial to use in basic apps:\n\n### Basic Usage\n\n**Directory Structure:**\n\n```\n.\n├── app.js\n└── views\n ├── home.handlebars\n └── layouts\n └── main.handlebars\n\n2 directories, 3 files\n```\n\n**app.js:**\n\nCreates a super simple Express app which shows the basic way to register a\nHandlebars view engine using this package.\n\n```javascript\nvar express = require('express'),\n exphbs = require('express3-handlebars'),\n\n app = express();\n\napp.engine('handlebars', exphbs({defaultLayout: 'main'}));\napp.set('view engine', 'handlebars');\n\napp.get('/', function (req, res) {\n res.render('home');\n});\n\napp.listen(3000);\n```\n\n**views/layouts/main.handlebars:**\n\nThe main layout is the HTML page wrapper which can be reused for the different\nviews of the app. 
`{{{body}}}` is used as a placeholder for where the main\ncontent should be rendered.\n\n```html\n\n\n\n \n Example App\n\n\n\n {{{body}}}\n\n\n\n```\n\n**views/home.handlebars:**\n\nThe content for the app's home view which will be rendered into the layout's\n`{{{body}}}`.\n\n```html\n
<h1>Example App: Home</h1>
\n```\n\n#### Running the Example\n\nThe above example is bundled in this package's [examples directory][], where\nit can be run by:\n\n```shell\n$ cd examples/basic/ && node app\n```\n\n### Using Instances\n\nAnother way to use this view engine is to create an instance(s) of\n`ExpressHandlebars`, allowing access to the full API:\n\n```javascript\nvar express = require('express'),\n exphbs = require('express3-handlebars'),\n\n app = express(),\n hbs = exphbs.create({ /* config */ });\n\n// Register `hbs.engine` with the Express app.\napp.engine('handlebars', hbs.engine);\napp.set('view engine', 'handlebars');\n\n// ...still have a reference to `hbs`, on which methods like `loadPartials()`\n// can be called.\n```\n\n**Note:** The [Advanced Usage][] example demonstrates how `ExpressHandlebars`\ninstances can be leveraged.\n\n### Template Caching\n\nThis view engine uses a smart template caching strategy. In development,\ntemplates will always be loaded from disk, i.e., no caching. In production, raw\nfiles and compiled Handlebars templates are aggressively cached.\n\nThe easiest way to control template/view caching is through Express'\n[view cache setting][]:\n\n```javascript\napp.enable('view cache');\n```\n\nExpress enables this setting by default when in production mode, i.e.,\n`process.env.NODE_ENV === \"production\"`.\n\n**Note:** All of the public API methods accept `options.cache`, which gives\ncontrol over caching when calling these methods directly.\n\n### Layouts\n\nA layout is simply a Handlebars template with a `{{{body}}}` placeholder.\nUsually it will be an HTML page wrapper into which views will be rendered.\n\nThis view engine adds back the concept of \"layout\", which was removed in Express\n3.x. It can be configured with a path to the layouts directory, by default it's\nset to `\"views/layouts/\"`.\n\nThere are two ways to set a default layout: configuring the view engine's\n`defaultLayout` property, or setting [Express locals][] `app.locals.layout`.\n\nThe layout into which a view should be rendered can be overridden per-request\nby assigning a different value to the `layout` request local. The following\nwill render the \"home\" view with no layout:\n\n```javascript\napp.get('/', function (req, res, next) {\n res.render('home', {layout: false});\n});\n```\n\n### Helpers\n\nHelper functions, or \"helpers\" are functions that can be\n[registered with Handlebars][] and can be called within a template. Helpers can\nbe used for transforming output, iterating over data, etc. To keep with the\nspirit of *logic-less* templates, helpers are the place where logic should be\ndefined.\n\nHandlebars ships with some [built-in helpers][], such as: `with`, `if`, `each`,\netc. Most application will need to extend this set of helpers to include\napp-specific logic and transformations. 
Beyond defining global helpers on\n`Handlebars`, this view engine supports `ExpressHandlebars` instance-level\nhelpers via the `helpers` configuration property, and render-level helpers via\n`options.helpers` when calling the `render()` and `renderView()` methods.\n\nThe following example shows helpers being specified at each level:\n\n**app.js:**\n\nCreates a super simple Express app which shows the basic way to register\n`ExpressHandlebars` instance-level helpers, and override one at the\nrender-level.\n\n```javascript\nvar express = require('express'),\n exphbs = require('express3-handlebars'),\n\n app = express(),\n hbs;\n\nhbs = exphbs.create({\n // Specify helpers which are only registered on this instance.\n helpers: {\n foo: function () { return 'FOO!'; }\n bar: function () { return 'BAR!'; }\n }\n});\n\napp.engine('handlebars', hbs.engine);\napp.set('view engine', 'handlebars');\n\napp.get('/', function (req, res, next) {\n res.render('home', {\n showTitle: true,\n\n // Override `foo` helper only for this rendering.\n helpers: {\n foo: function () { return 'foo.'; }\n }\n });\n});\n\napp.listen(3000);\n```\n\n**views/home.handlebars:**\n\nThe app's home view which uses helper functions to help render the contents.\n\n```html\n\n\n\n \n Example App - Home\n\n\n\n \n {{#if showTitle}}\n
<h1>Home</h1>\n {{/if}}\n\n \n <p>{{foo}}</p>\n\n \n <p>{{bar}}</p>
\n\n\n\n```\n\n#### More on Helpers\n\nRefer to the [Handlebars website][] for more information on defining helpers:\n\n* [Expression Helpers][]\n* [Block Helpers][]\n\n\n[examples directory]: https://github.com/ericf/express3-handlebars/tree/master/examples\n[view cache setting]: http://expressjs.com/api.html#app-settings\n[Express locals]: http://expressjs.com/api.html#app.locals\n[registered with Handlebars]: https://github.com/wycats/handlebars.js/#registering-helpers\n[built-in helpers]: http://handlebarsjs.com/#builtins\n[Handlebars website]: http://handlebarsjs.com/\n[Expression Helpers]: http://handlebarsjs.com/expressions.html#helpers\n[Block Helpers]: http://handlebarsjs.com/block_helpers.html\n\n\nAPI\n---\n\n### Configuration and Defaults\n\nThere are two main ways to use this package: via its engine factory function, or\ncreating `ExpressHandlebars` instances; both use the same configuration\nproperties and defaults.\n\n```javascript\nvar exphbs = require('express3-handlebars');\n\n// Using the engine factory:\nexphbs({ /* config */ });\n\n// Create an instance:\nexphbs.create({ /* config */ });\n```\n\nThe following is the list of configuration properties and their default values\n(if any):\n\n#### `defaultLayout`\nThe string name or path of a template in the `layoutsDir` to use as the default\nlayout. This is overridden by a `layout` specified in the app or response\n`locals`. **Note:** A falsy value will render without a layout; e.g.,\n`res.render('home', {layout: false});`.\n\n#### `extname=\".handlebars\"`\nThe string name of the file extension used by the templates.\n\n#### `handlebars=require('handlebars')`\nThe Handlebars module/implementation. This allows for the `ExpressHandlebars`\ninstance to use a different Handlebars module/implementation than that provided\nby the Handlebars npm package.\n\n#### `helpers`\nAn object which holds the helper functions used when rendering templates with\nthis `ExpressHandlebars` instance. When rendering a template, a collection of\nhelpers will be generated by merging: `handlebars.helpers` (global), `helpers`\n(instance), and `options.helpers` (render-level). This allows Handlebars'\n`registerHelper()` function to operate as expected, will providing two extra\nlevels over helper overrides.\n\n#### `layoutsDir=\"views/layouts/\"`\nThe string path to the directory where the layout templates reside.\n\n#### `partialsDir=\"views/partials/\"`\nThe string path to the directory where the partials templates reside.\n\n### Properties\n\nThe public API properties are provided via `ExpressHandlebars` instances. In\nadditional to the properties listed in the **Configuration and Defaults**\nsection, the following are additional public properties:\n\n#### `compiled`\nAn object cache which holds compiled Handlebars template functions in the\nformat: `{\"path/to/template\": [Function]}`.\n\n#### `engine`\nA function reference to the `renderView()` method which is bound to `this`\n`ExpressHandlebars` instance. This bound function should be used when\nregistering this view engine with an Express app.\n\n#### `handlebarsVersion`\nThe version number of `handlebars` as a semver. 
This is unsed internally to\nbranch on certain operations which differ between Handlebars releases.\n\n#### `precompiled`\nAn object cache which holds precompiled Handlebars template strings in the\nformat: `{\"path/to/template\": [String]}`.\n\n### Methods\n\nThe following is the list of public API methods provided via `ExpressHandlebars`\ninstances:\n\n#### `loadPartials(options|callback, [callback])`\n\nRetrieves the partials in the `partialsDir` and passes an object mapping the\npartials in the form `{name: partial}` to the `callback`.\n\nBy default each partial will be a compiled Handlebars template function. Use\n`options.precompiled` to receive the partials as precompiled templates — this is\nuseful for sharing templates with client code.\n\n**Parameters:**\n\n* `[options]`: Optional object containing any of the following properties:\n\n * `[cache]`: Whether cached templates can be used if they have already been\n requested. This is recommended for production to avoid unnecessary file I/O.\n\n * `[precompiled=false]`: Whether precompiled templates should be provided,\n instead of compiled Handlebars template functions.\n\n* `callback`: Function to call once the partials are retrieved.\n\nThe name of each partial corresponds to its location in `partialsDir`. For\nexample, consider the following directory structure:\n\n```\nviews\n└── partials\n ├── foo\n │   └── bar.handlebars\n └── title.handlebars\n\n2 directories, 2 files\n```\n\n`loadPartials()` would produce the following result:\n\n```javascript\nvar hbs = require('express3-handlebars').create();\n\nhbs.loadPartials(function (err, partials) {\n console.log(partials);\n // => { 'foo.bar': [Function],\n // => title: [Function] }\n});\n```\n\n**Note:** The partial name `\"foo.bar\"` would ideally be `\"foo/bar\"`, but this is\nbeing prevented by a [Handlebars bug][]. Once this bug is fixed, a future\nversion will use a \"/\" separator. Templates requiring the partial still use:\n`{{> foo/bar}}`.\n\n#### `loadTemplate(filePath, options|callback, [callback])`\n\nRetrieves the template at the specified `filePath` and passes a compiled\nHandlebars template function to the `callback`.\n\nUse `options.precompiled` to receive a precompiled Handlebars template.\n\n**Parameters:**\n\n* `filePath`: String path to the Handlebars template file.\n\n* `[options]`: Optional object containing any of the following properties:\n\n * `[cache]`: Whether a cached template can be used if it have already been\n requested. This is recommended for production to avoid necessary file I/O.\n\n * `[precompiled=false]`: Whether a precompiled template should be provided,\n instead of a compiled Handlebars template function.\n\n* `callback`: Function to call once the template is retrieved.\n\n#### `loadTemplates(dirPath, options|callback, [callback])`\n\nRetrieves the all the templates in the specified `dirPath` and passes an object\nmapping the compiled templates in the form `{filename: template}` to the\n`callback`.\n\nUse `options.precompiled` to receive precompiled Handlebars templates — this is\nuseful for sharing templates with client code.\n\n**Parameters:**\n\n* `dirPath`: String path to the directory containing Handlebars template files.\n\n* `[options]`: Optional object containing any of the following properties:\n\n * `[cache]`: Whether cached templates can be used if it have already been\n requested. 
This is recommended for production to avoid necessary file I/O.\n\n * `[precompiled=false]`: Whether precompiled templates should be provided,\n instead of a compiled Handlebars template function.\n\n* `callback`: Function to call once the templates are retrieved.\n\n#### `render(filePath, options|callback, [callback])`\n\nRenders the template at the specified `filePath` using this instance's `helpers`\nand partials, and passes the resulting string to the `callback`.\n\nThe `options` will be used both as the context in which the Handlebars template\nis rendered, and to signal this view engine on how it should behave, e.g.,\n`options.cache = false` will load _always_ load the templates from disk.\n\n**Parameters:**\n\n* `filePath`: String path to the Handlebars template file.\n\n* `[options]`: Optional object which will serve as the context in which the\n Handlebars template is rendered. It may also contain any of the following\n properties which affect this view engine's behavior:\n\n * `[cache]`: Whether a cached template can be used if it have already been\n requested. This is recommended for production to avoid unnecessary file I/O.\n\n * `[helpers]`: Render-level helpers should be merged with (and will override)\n instance and global helper functions.\n\n* `callback`: Function to call once the template is retrieved.\n\n#### `renderView(viewPath, options|callback, [callback])`\n\nRenders the template at the specified `viewPath` as the `{{{body}}}` within the\nlayout specified by the `defaultLayout` or `options.layout`. Rendering will use\nthis instance's `helpers` and partials, and passes the resulting string to the\n`callback`.\n\nThis method is called by Express and is the main entry point into this Express\nview engine implementation. It adds the concept of a \"layout\" and delegates\nrendering to the `render()` method.\n\nThe `options` will be used both as the context in which the Handlebars templates\nare rendered, and to signal this view engine on how it should behave, e.g.,\n`options.cache=false` will load _always_ load the templates from disk.\n\n**Parameters:**\n\n* `viewPath`: String path to the Handlebars template file which should serve as\n the `{{{body}}}` when using a layout.\n\n* `[options]`: Optional object which will serve as the context in which the\n Handlebars templates are rendered. It may also contain any of the following\n properties which affect this view engine's behavior:\n\n * `[cache]`: Whether cached templates can be used if they have already been\n requested. This is recommended for production to avoid unnecessary file I/O.\n\n * `[helpers]`: Render-level helpers should be merged with (and will override)\n instance and global helper functions.\n\n * `[layout]`: Optional string path to the Handlebars template file to be used\n as the \"layout\". This overrides any `defaultLayout` value. 
Passing a falsy\n value will render with no layout (even if a `defaultLayout` is defined).\n\n* `callback`: Function to call once the template is retrieved.\n\n### Statics\n\nThe following is the list of static API properties and methods provided on the\n`ExpressHandlebars` constructor:\n\n#### `getHandlebarsSemver(handlebars)`\n\nReturns a semver-compatible version string for the specified `handlebars`\nmodule/implementation.\n\nThis utility function is used to compute the value for an `ExpressHandlebars`\ninstance's `handlebarsVersion` property.\n\n\n[Handlebars bug]: https://github.com/wycats/handlebars.js/pull/389\n\n\nExamples\n--------\n\n### [Basic Usage][]\n\nThis example shows the most basic way to use this view engine.\n\n### [Advanced Usage][]\n\nThis example is more comprehensive and shows how to use many of the features of\nthis view engine, including helpers, partials, multiple layouts, etc.\n\nAs noted in the **Package Design** section, this view engine's implementation is\ninstance-based, and more advanced usages can take advantage of this. The\nAdvanced Usage example demonstrates how to use an `ExpressHandlebars` instance\nto share templates with the client, among other features.\n\n\n[Basic Usage]: https://github.com/ericf/express3-handlebars/tree/master/examples/basic\n[Advanced Usage]: https://github.com/ericf/express3-handlebars/tree/master/examples/advanced\n\n\nLicense\n-------\n\nThis software is free to use under the Yahoo! Inc. BSD license.\nSee the [LICENSE file][] for license text and copyright information.\n\n\n[LICENSE file]: https://github.com/ericf/express3-handlebars/blob/master/LICENSE\n", + "readmeFilename": "README.md", + "_id": "express3-handlebars@0.5.0", + "_from": "express3-handlebars@*" +} diff --git a/node_modules/express3-handlebars/package.json b/node_modules/express3-handlebars/package.json index 234a58d0b..4809aeb5a 100644 --- a/node_modules/express3-handlebars/package.json +++ b/node_modules/express3-handlebars/package.json @@ -1,46 +1,67 @@ { - "name": "express3-handlebars", - "description": "A Handlebars view engine for Express which doesn't suck.", - "version": "0.5.0", - "homepage": "https://github.com/ericf/express3-handlebars", - "keywords": [ - "express", - "express3", - "handlebars", - "view", - "layout", - "partials" + "_from": "express3-handlebars@0.5.0", + "_id": "express3-handlebars@0.5.0", + "_inBundle": false, + "_integrity": "sha1-f3f++YOM762WfOGOdf0JL7TDE8I=", + "_location": "/express3-handlebars", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "express3-handlebars@0.5.0", + "name": "express3-handlebars", + "escapedName": "express3-handlebars", + "rawSpec": "0.5.0", + "saveSpec": null, + "fetchSpec": "0.5.0" + }, + "_requiredBy": [ + "/" ], + "_resolved": "https://registry.npmjs.org/express3-handlebars/-/express3-handlebars-0.5.0.tgz", + "_shasum": "7f77fef9838cefad967ce18e75fd092fb4c313c2", + "_spec": "express3-handlebars@0.5.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton", "author": { "name": "Eric Ferraiuolo", "email": "eferraiuolo@gmail.com", "url": "http://ericf.me/" }, - "repository": { - "type": "git", - "url": "git://github.com/ericf/express3-handlebars.git" - }, "bugs": { "url": "https://github.com/ericf/express3-handlebars/issues" }, - "engines": { - "node": ">=0.8" - }, + "bundleDependencies": false, "dependencies": { "async": "~0.2", "glob": "3.x", "handlebars": "1.x", "semver": "2.x" }, + "deprecated": "THIS PACKAGE HAS BEEN RENAMED TO: 
express-handlebars", + "description": "A Handlebars view engine for Express which doesn't suck.", "devDependencies": { "express": "3.x" }, - "main": "index.js", "directories": { "lib": "./lib" }, - "readme": "Express3 Handlebars\n===================\n\nA [Handlebars][] view engine for [Express][] which doesn't suck.\n\n[![Dependency Status](https://david-dm.org/ericf/express3-handlebars.png)][status]\n\n\n[Express]: https://github.com/visionmedia/express\n[Handlebars]: https://github.com/wycats/handlebars.js\n[status]: https://david-dm.org/ericf/express3-handlebars\n\n\nGoals & Design\n--------------\n\nI created this project out of frustration with the existing Handlebars view\nengines for Express. As of version 3.x, Express got out of the business of being\na generic view engine — this was a great decision — leaving developers to\nimplement the concepts of layouts, partials, and doing file I/O for their\ntemplate engines of choice.\n\n### Goals and Features\n\nAfter building a half-dozen Express apps, I developed requirements and opinions\nabout what a Handlebars view engine should provide and how it should be\nimplemented. The following is that list:\n\n* Add back the concept of \"layout\", which was removed in Express 3.x.\n\n* Add back the concept of \"partials\" via Handlebars' partials mechanism.\n\n* Support a directory of partials; e.g., `{{> foo/bar}}` which exists on the\n file system at `views/partials/foo/bar.handlebars` by default.\n\n* Smart file system I/O and template caching. When in development, templates are\n always loaded from disk. In production, raw files and compiled templates are\n cached, including partials.\n\n* All async and non-blocking. File system I/O is slow and servers should not be\n blocked from handling requests while reading from disk. I/O queuing is used to\n avoid doing unnecessary work.\n\n* Ability to expose precompiled templates and partials to the client, enabling\n template sharing and reuse.\n\n* Ability to use a different Handlebars module/implementation other than the\n Handlebars npm package.\n\n### Package Design\n\nThis package was designed to work great for both the simple and complex use\ncases. I _intentionally_ made sure the full implementation is exposed and is\neasily overrideable.\n\nThe package exports a function which can be invoked with no arguments or with a\n`config` object and it will return a function (closed over sane defaults) which\ncan be registered with an Express app. It's an engine factory function.\n\nThis exported engine factory has two properties which expose the underlying\nimplementation:\n\n* `ExpressHandlebars()`: The constructor function which holds the internal\n implementation on its `prototype`. This produces instance objects which store\n their configuration, `compiled` and `precompiled` templates, and expose an\n `engine()` function which can be registered with an Express app.\n\n* `create()`: A convenience factory function for creating `ExpressHandlebars`\n instances.\n\nAn instance-based approach is used so that multiple `ExpressHandlebars`\ninstances can be created with their own configuration, templates, partials, and\nhelpers.\n\n\nInstallation\n------------\n\nInstall using npm:\n\n```shell\n$ npm install express3-handlebars\n```\n\n\nUsage\n-----\n\nThis view engine uses sane defaults that leverage the \"Express-way\" of\nstructuring an app's views. 
This makes it trivial to use in basic apps:\n\n### Basic Usage\n\n**Directory Structure:**\n\n```\n.\n├── app.js\n└── views\n ├── home.handlebars\n └── layouts\n └── main.handlebars\n\n2 directories, 3 files\n```\n\n**app.js:**\n\nCreates a super simple Express app which shows the basic way to register a\nHandlebars view engine using this package.\n\n```javascript\nvar express = require('express'),\n exphbs = require('express3-handlebars'),\n\n app = express();\n\napp.engine('handlebars', exphbs({defaultLayout: 'main'}));\napp.set('view engine', 'handlebars');\n\napp.get('/', function (req, res) {\n res.render('home');\n});\n\napp.listen(3000);\n```\n\n**views/layouts/main.handlebars:**\n\nThe main layout is the HTML page wrapper which can be reused for the different\nviews of the app. `{{{body}}}` is used as a placeholder for where the main\ncontent should be rendered.\n\n```html\n\n\n\n \n Example App\n\n\n\n {{{body}}}\n\n\n\n```\n\n**views/home.handlebars:**\n\nThe content for the app's home view which will be rendered into the layout's\n`{{{body}}}`.\n\n```html\n
<h1>Example App: Home</h1>
\n```\n\n#### Running the Example\n\nThe above example is bundled in this package's [examples directory][], where\nit can be run by:\n\n```shell\n$ cd examples/basic/ && node app\n```\n\n### Using Instances\n\nAnother way to use this view engine is to create an instance(s) of\n`ExpressHandlebars`, allowing access to the full API:\n\n```javascript\nvar express = require('express'),\n exphbs = require('express3-handlebars'),\n\n app = express(),\n hbs = exphbs.create({ /* config */ });\n\n// Register `hbs.engine` with the Express app.\napp.engine('handlebars', hbs.engine);\napp.set('view engine', 'handlebars');\n\n// ...still have a reference to `hbs`, on which methods like `loadPartials()`\n// can be called.\n```\n\n**Note:** The [Advanced Usage][] example demonstrates how `ExpressHandlebars`\ninstances can be leveraged.\n\n### Template Caching\n\nThis view engine uses a smart template caching strategy. In development,\ntemplates will always be loaded from disk, i.e., no caching. In production, raw\nfiles and compiled Handlebars templates are aggressively cached.\n\nThe easiest way to control template/view caching is through Express'\n[view cache setting][]:\n\n```javascript\napp.enable('view cache');\n```\n\nExpress enables this setting by default when in production mode, i.e.,\n`process.env.NODE_ENV === \"production\"`.\n\n**Note:** All of the public API methods accept `options.cache`, which gives\ncontrol over caching when calling these methods directly.\n\n### Layouts\n\nA layout is simply a Handlebars template with a `{{{body}}}` placeholder.\nUsually it will be an HTML page wrapper into which views will be rendered.\n\nThis view engine adds back the concept of \"layout\", which was removed in Express\n3.x. It can be configured with a path to the layouts directory, by default it's\nset to `\"views/layouts/\"`.\n\nThere are two ways to set a default layout: configuring the view engine's\n`defaultLayout` property, or setting [Express locals][] `app.locals.layout`.\n\nThe layout into which a view should be rendered can be overridden per-request\nby assigning a different value to the `layout` request local. The following\nwill render the \"home\" view with no layout:\n\n```javascript\napp.get('/', function (req, res, next) {\n res.render('home', {layout: false});\n});\n```\n\n### Helpers\n\nHelper functions, or \"helpers\" are functions that can be\n[registered with Handlebars][] and can be called within a template. Helpers can\nbe used for transforming output, iterating over data, etc. To keep with the\nspirit of *logic-less* templates, helpers are the place where logic should be\ndefined.\n\nHandlebars ships with some [built-in helpers][], such as: `with`, `if`, `each`,\netc. Most application will need to extend this set of helpers to include\napp-specific logic and transformations. 
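For example, a helper can be registered globally on the Handlebars module itself before any rendering happens; the `shortDate` name and its formatting logic below are purely illustrative:

```javascript
var Handlebars = require('handlebars');

// A globally registered helper is visible to every template rendered with
// this Handlebars module, whichever view engine instance does the rendering.
Handlebars.registerHelper('shortDate', function (date) {
    return date.toISOString().slice(0, 10); // e.g. "2013-06-01"
});
```

A template would then call it like any built-in helper: `{{shortDate publishedAt}}`.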
Beyond defining global helpers on\n`Handlebars`, this view engine supports `ExpressHandlebars` instance-level\nhelpers via the `helpers` configuration property, and render-level helpers via\n`options.helpers` when calling the `render()` and `renderView()` methods.\n\nThe following example shows helpers being specified at each level:\n\n**app.js:**\n\nCreates a super simple Express app which shows the basic way to register\n`ExpressHandlebars` instance-level helpers, and override one at the\nrender-level.\n\n```javascript\nvar express = require('express'),\n exphbs = require('express3-handlebars'),\n\n app = express(),\n hbs;\n\nhbs = exphbs.create({\n // Specify helpers which are only registered on this instance.\n helpers: {\n foo: function () { return 'FOO!'; }\n bar: function () { return 'BAR!'; }\n }\n});\n\napp.engine('handlebars', hbs.engine);\napp.set('view engine', 'handlebars');\n\napp.get('/', function (req, res, next) {\n res.render('home', {\n showTitle: true,\n\n // Override `foo` helper only for this rendering.\n helpers: {\n foo: function () { return 'foo.'; }\n }\n });\n});\n\napp.listen(3000);\n```\n\n**views/home.handlebars:**\n\nThe app's home view which uses helper functions to help render the contents.\n\n```html\n\n\n\n \n Example App - Home\n\n\n\n \n {{#if showTitle}}\n
<h1>Home</h1>\n    {{/if}}\n\n    <p>{{foo}}</p>\n\n    <p>{{bar}}</p>
\n\n\n\n```\n\n#### More on Helpers\n\nRefer to the [Handlebars website][] for more information on defining helpers:\n\n* [Expression Helpers][]\n* [Block Helpers][]\n\n\n[examples directory]: https://github.com/ericf/express3-handlebars/tree/master/examples\n[view cache setting]: http://expressjs.com/api.html#app-settings\n[Express locals]: http://expressjs.com/api.html#app.locals\n[registered with Handlebars]: https://github.com/wycats/handlebars.js/#registering-helpers\n[built-in helpers]: http://handlebarsjs.com/#builtins\n[Handlebars website]: http://handlebarsjs.com/\n[Expression Helpers]: http://handlebarsjs.com/expressions.html#helpers\n[Block Helpers]: http://handlebarsjs.com/block_helpers.html\n\n\nAPI\n---\n\n### Configuration and Defaults\n\nThere are two main ways to use this package: via its engine factory function, or\ncreating `ExpressHandlebars` instances; both use the same configuration\nproperties and defaults.\n\n```javascript\nvar exphbs = require('express3-handlebars');\n\n// Using the engine factory:\nexphbs({ /* config */ });\n\n// Create an instance:\nexphbs.create({ /* config */ });\n```\n\nThe following is the list of configuration properties and their default values\n(if any):\n\n#### `defaultLayout`\nThe string name or path of a template in the `layoutsDir` to use as the default\nlayout. This is overridden by a `layout` specified in the app or response\n`locals`. **Note:** A falsy value will render without a layout; e.g.,\n`res.render('home', {layout: false});`.\n\n#### `extname=\".handlebars\"`\nThe string name of the file extension used by the templates.\n\n#### `handlebars=require('handlebars')`\nThe Handlebars module/implementation. This allows for the `ExpressHandlebars`\ninstance to use a different Handlebars module/implementation than that provided\nby the Handlebars npm package.\n\n#### `helpers`\nAn object which holds the helper functions used when rendering templates with\nthis `ExpressHandlebars` instance. When rendering a template, a collection of\nhelpers will be generated by merging: `handlebars.helpers` (global), `helpers`\n(instance), and `options.helpers` (render-level). This allows Handlebars'\n`registerHelper()` function to operate as expected, will providing two extra\nlevels over helper overrides.\n\n#### `layoutsDir=\"views/layouts/\"`\nThe string path to the directory where the layout templates reside.\n\n#### `partialsDir=\"views/partials/\"`\nThe string path to the directory where the partials templates reside.\n\n### Properties\n\nThe public API properties are provided via `ExpressHandlebars` instances. In\nadditional to the properties listed in the **Configuration and Defaults**\nsection, the following are additional public properties:\n\n#### `compiled`\nAn object cache which holds compiled Handlebars template functions in the\nformat: `{\"path/to/template\": [Function]}`.\n\n#### `engine`\nA function reference to the `renderView()` method which is bound to `this`\n`ExpressHandlebars` instance. This bound function should be used when\nregistering this view engine with an Express app.\n\n#### `handlebarsVersion`\nThe version number of `handlebars` as a semver. 
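With the Handlebars 1.x dependency declared by this package, that is simply the resolved package's own version string; the log call below is only an illustration of reading the property:

```javascript
var hbs = require('express3-handlebars').create();

// The semver of the Handlebars module this instance is using,
// e.g. "1.3.0" when the 1.x range resolves to that release.
console.log(hbs.handlebarsVersion);
```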
This is unsed internally to\nbranch on certain operations which differ between Handlebars releases.\n\n#### `precompiled`\nAn object cache which holds precompiled Handlebars template strings in the\nformat: `{\"path/to/template\": [String]}`.\n\n### Methods\n\nThe following is the list of public API methods provided via `ExpressHandlebars`\ninstances:\n\n#### `loadPartials(options|callback, [callback])`\n\nRetrieves the partials in the `partialsDir` and passes an object mapping the\npartials in the form `{name: partial}` to the `callback`.\n\nBy default each partial will be a compiled Handlebars template function. Use\n`options.precompiled` to receive the partials as precompiled templates — this is\nuseful for sharing templates with client code.\n\n**Parameters:**\n\n* `[options]`: Optional object containing any of the following properties:\n\n * `[cache]`: Whether cached templates can be used if they have already been\n requested. This is recommended for production to avoid unnecessary file I/O.\n\n * `[precompiled=false]`: Whether precompiled templates should be provided,\n instead of compiled Handlebars template functions.\n\n* `callback`: Function to call once the partials are retrieved.\n\nThe name of each partial corresponds to its location in `partialsDir`. For\nexample, consider the following directory structure:\n\n```\nviews\n└── partials\n ├── foo\n │   └── bar.handlebars\n └── title.handlebars\n\n2 directories, 2 files\n```\n\n`loadPartials()` would produce the following result:\n\n```javascript\nvar hbs = require('express3-handlebars').create();\n\nhbs.loadPartials(function (err, partials) {\n console.log(partials);\n // => { 'foo.bar': [Function],\n // => title: [Function] }\n});\n```\n\n**Note:** The partial name `\"foo.bar\"` would ideally be `\"foo/bar\"`, but this is\nbeing prevented by a [Handlebars bug][]. Once this bug is fixed, a future\nversion will use a \"/\" separator. Templates requiring the partial still use:\n`{{> foo/bar}}`.\n\n#### `loadTemplate(filePath, options|callback, [callback])`\n\nRetrieves the template at the specified `filePath` and passes a compiled\nHandlebars template function to the `callback`.\n\nUse `options.precompiled` to receive a precompiled Handlebars template.\n\n**Parameters:**\n\n* `filePath`: String path to the Handlebars template file.\n\n* `[options]`: Optional object containing any of the following properties:\n\n * `[cache]`: Whether a cached template can be used if it have already been\n requested. This is recommended for production to avoid necessary file I/O.\n\n * `[precompiled=false]`: Whether a precompiled template should be provided,\n instead of a compiled Handlebars template function.\n\n* `callback`: Function to call once the template is retrieved.\n\n#### `loadTemplates(dirPath, options|callback, [callback])`\n\nRetrieves the all the templates in the specified `dirPath` and passes an object\nmapping the compiled templates in the form `{filename: template}` to the\n`callback`.\n\nUse `options.precompiled` to receive precompiled Handlebars templates — this is\nuseful for sharing templates with client code.\n\n**Parameters:**\n\n* `dirPath`: String path to the directory containing Handlebars template files.\n\n* `[options]`: Optional object containing any of the following properties:\n\n * `[cache]`: Whether cached templates can be used if it have already been\n requested. 
This is recommended for production to avoid necessary file I/O.\n\n * `[precompiled=false]`: Whether precompiled templates should be provided,\n instead of a compiled Handlebars template function.\n\n* `callback`: Function to call once the templates are retrieved.\n\n#### `render(filePath, options|callback, [callback])`\n\nRenders the template at the specified `filePath` using this instance's `helpers`\nand partials, and passes the resulting string to the `callback`.\n\nThe `options` will be used both as the context in which the Handlebars template\nis rendered, and to signal this view engine on how it should behave, e.g.,\n`options.cache = false` will load _always_ load the templates from disk.\n\n**Parameters:**\n\n* `filePath`: String path to the Handlebars template file.\n\n* `[options]`: Optional object which will serve as the context in which the\n Handlebars template is rendered. It may also contain any of the following\n properties which affect this view engine's behavior:\n\n * `[cache]`: Whether a cached template can be used if it have already been\n requested. This is recommended for production to avoid unnecessary file I/O.\n\n * `[helpers]`: Render-level helpers should be merged with (and will override)\n instance and global helper functions.\n\n* `callback`: Function to call once the template is retrieved.\n\n#### `renderView(viewPath, options|callback, [callback])`\n\nRenders the template at the specified `viewPath` as the `{{{body}}}` within the\nlayout specified by the `defaultLayout` or `options.layout`. Rendering will use\nthis instance's `helpers` and partials, and passes the resulting string to the\n`callback`.\n\nThis method is called by Express and is the main entry point into this Express\nview engine implementation. It adds the concept of a \"layout\" and delegates\nrendering to the `render()` method.\n\nThe `options` will be used both as the context in which the Handlebars templates\nare rendered, and to signal this view engine on how it should behave, e.g.,\n`options.cache=false` will load _always_ load the templates from disk.\n\n**Parameters:**\n\n* `viewPath`: String path to the Handlebars template file which should serve as\n the `{{{body}}}` when using a layout.\n\n* `[options]`: Optional object which will serve as the context in which the\n Handlebars templates are rendered. It may also contain any of the following\n properties which affect this view engine's behavior:\n\n * `[cache]`: Whether cached templates can be used if they have already been\n requested. This is recommended for production to avoid unnecessary file I/O.\n\n * `[helpers]`: Render-level helpers should be merged with (and will override)\n instance and global helper functions.\n\n * `[layout]`: Optional string path to the Handlebars template file to be used\n as the \"layout\". This overrides any `defaultLayout` value. 
Passing a falsy\n value will render with no layout (even if a `defaultLayout` is defined).\n\n* `callback`: Function to call once the template is retrieved.\n\n### Statics\n\nThe following is the list of static API properties and methods provided on the\n`ExpressHandlebars` constructor:\n\n#### `getHandlebarsSemver(handlebars)`\n\nReturns a semver-compatible version string for the specified `handlebars`\nmodule/implementation.\n\nThis utility function is used to compute the value for an `ExpressHandlebars`\ninstance's `handlebarsVersion` property.\n\n\n[Handlebars bug]: https://github.com/wycats/handlebars.js/pull/389\n\n\nExamples\n--------\n\n### [Basic Usage][]\n\nThis example shows the most basic way to use this view engine.\n\n### [Advanced Usage][]\n\nThis example is more comprehensive and shows how to use many of the features of\nthis view engine, including helpers, partials, multiple layouts, etc.\n\nAs noted in the **Package Design** section, this view engine's implementation is\ninstance-based, and more advanced usages can take advantage of this. The\nAdvanced Usage example demonstrates how to use an `ExpressHandlebars` instance\nto share templates with the client, among other features.\n\n\n[Basic Usage]: https://github.com/ericf/express3-handlebars/tree/master/examples/basic\n[Advanced Usage]: https://github.com/ericf/express3-handlebars/tree/master/examples/advanced\n\n\nLicense\n-------\n\nThis software is free to use under the Yahoo! Inc. BSD license.\nSee the [LICENSE file][] for license text and copyright information.\n\n\n[LICENSE file]: https://github.com/ericf/express3-handlebars/blob/master/LICENSE\n", - "readmeFilename": "README.md", - "_id": "express3-handlebars@0.5.0", - "_from": "express3-handlebars@*" + "engines": { + "node": ">=0.8" + }, + "homepage": "https://github.com/ericf/express3-handlebars", + "keywords": [ + "express", + "express3", + "handlebars", + "view", + "layout", + "partials" + ], + "main": "index.js", + "name": "express3-handlebars", + "repository": { + "type": "git", + "url": "git://github.com/ericf/express3-handlebars.git" + }, + "version": "0.5.0" } diff --git a/node_modules/express/node_modules/fresh/.npmignore b/node_modules/fresh/.npmignore similarity index 100% rename from node_modules/express/node_modules/fresh/.npmignore rename to node_modules/fresh/.npmignore diff --git a/node_modules/express/node_modules/fresh/History.md b/node_modules/fresh/History.md similarity index 100% rename from node_modules/express/node_modules/fresh/History.md rename to node_modules/fresh/History.md diff --git a/node_modules/express/node_modules/fresh/Makefile b/node_modules/fresh/Makefile similarity index 100% rename from node_modules/express/node_modules/fresh/Makefile rename to node_modules/fresh/Makefile diff --git a/node_modules/express/node_modules/fresh/Readme.md b/node_modules/fresh/Readme.md similarity index 100% rename from node_modules/express/node_modules/fresh/Readme.md rename to node_modules/fresh/Readme.md diff --git a/node_modules/express/node_modules/fresh/index.js b/node_modules/fresh/index.js similarity index 100% rename from node_modules/express/node_modules/fresh/index.js rename to node_modules/fresh/index.js diff --git a/node_modules/fresh/package.json b/node_modules/fresh/package.json new file mode 100644 index 000000000..76ec0e53b --- /dev/null +++ b/node_modules/fresh/package.json @@ -0,0 +1,51 @@ +{ + "_from": "fresh@0.2.0", + "_id": "fresh@0.2.0", + "_inBundle": false, + "_integrity": "sha1-v9lALPPfEsSkwxDHn5mj3eE9NKc=", + 
"_location": "/fresh", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "fresh@0.2.0", + "name": "fresh", + "escapedName": "fresh", + "rawSpec": "0.2.0", + "saveSpec": null, + "fetchSpec": "0.2.0" + }, + "_requiredBy": [ + "/connect", + "/express", + "/send" + ], + "_resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "_shasum": "bfd9402cf3df12c4a4c310c79f99a3dde13d34a7", + "_spec": "fresh@0.2.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bugs": { + "url": "https://github.com/visionmedia/node-fresh/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "HTTP response freshness testing", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "homepage": "https://github.com/visionmedia/node-fresh#readme", + "main": "index.js", + "name": "fresh", + "repository": { + "type": "git", + "url": "git+https://github.com/visionmedia/node-fresh.git" + }, + "version": "0.2.0" +} diff --git a/node_modules/express3-handlebars/node_modules/glob/.npmignore b/node_modules/glob/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/.npmignore rename to node_modules/glob/.npmignore diff --git a/node_modules/express3-handlebars/node_modules/glob/.travis.yml b/node_modules/glob/.travis.yml similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/.travis.yml rename to node_modules/glob/.travis.yml diff --git a/node_modules/express3-handlebars/node_modules/glob/LICENSE b/node_modules/glob/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/LICENSE rename to node_modules/glob/LICENSE diff --git a/node_modules/express3-handlebars/node_modules/glob/README.md b/node_modules/glob/README.md similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/README.md rename to node_modules/glob/README.md diff --git a/node_modules/express3-handlebars/node_modules/glob/examples/g.js b/node_modules/glob/examples/g.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/examples/g.js rename to node_modules/glob/examples/g.js diff --git a/node_modules/express3-handlebars/node_modules/glob/examples/usr-local.js b/node_modules/glob/examples/usr-local.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/examples/usr-local.js rename to node_modules/glob/examples/usr-local.js diff --git a/node_modules/express3-handlebars/node_modules/glob/glob.js b/node_modules/glob/glob.js similarity index 89% rename from node_modules/express3-handlebars/node_modules/glob/glob.js rename to node_modules/glob/glob.js index f4c69e973..f646c4483 100644 --- a/node_modules/express3-handlebars/node_modules/glob/glob.js +++ b/node_modules/glob/glob.js @@ -76,6 +76,7 @@ function globSync (pattern, options) { return glob(pattern, options) } +this._processingEmitQueue = false glob.Glob = Glob inherits(Glob, EE) @@ -98,9 +99,13 @@ function Glob (pattern, options, cb) { options = options || {} + this._endEmitted = false this.EOF = {} this._emitQueue = [] + this.paused = false + this._processingEmitQueue = false + this.maxDepth = options.maxDepth || 1000 this.maxLength = options.maxLength || Infinity this.cache = options.cache || {} @@ -214,10 +219,15 @@ Glob.prototype._finish = function () { 
all = all.sort(this.nocase ? alphasorti : alphasort) } + if (this.mark) { + // at *some* point we statted all of these + all = all.map(this._mark, this) + } + this.log("emitting end", all) this.EOF = this.found = all - this.emitMatch(this.EOF, -1) + this.emitMatch(this.EOF) } function alphasorti (a, b) { @@ -230,29 +240,6 @@ function alphasort (a, b) { return a > b ? 1 : a < b ? -1 : 0 } -Glob.prototype.abort = function () { - this.aborted = true - this.emit("abort") -} - -Glob.prototype.pause = function () { - if (this.paused) return - if (this.sync) - this.emit("error", new Error("Can't pause/resume sync glob")) - this.paused = true - this.emit("pause") -} - -Glob.prototype.resume = function () { - if (!this.paused) return - if (this.sync) - this.emit("error", new Error("Can't pause/resume sync glob")) - this.paused = false - this.emit("resume") - this._processEmitQueue() - //process.nextTick(this.emit.bind(this, "resume")) -} - Glob.prototype._mark = function (p) { var c = this.cache[p] var m = p @@ -274,45 +261,92 @@ Glob.prototype._mark = function (p) { return m } -Glob.prototype._pushMatch = function(m, index) { - if (this.mark && m !== this.EOF) - m = this._mark(m) +Glob.prototype.abort = function () { + this.aborted = true + this.emit("abort") +} - if (m !== this.EOF) { - this.matches[index] = this.matches[index] || {} - this.matches[index][m] = true - } +Glob.prototype.pause = function () { + if (this.paused) return + if (this.sync) + this.emit("error", new Error("Can't pause/resume sync glob")) + this.paused = true + this.emit("pause") +} - this._emitQueue.push(m) +Glob.prototype.resume = function () { + if (!this.paused) return + if (this.sync) + this.emit("error", new Error("Can't pause/resume sync glob")) + this.paused = false + this.emit("resume") this._processEmitQueue() + //process.nextTick(this.emit.bind(this, "resume")) } -Glob.prototype.emitMatch = function (m, index) { - if ((!this.stat && !this.mark) || this.statCache[m] || m === this.EOF) { - this._pushMatch(m, index) - } else { - this._stat(m, function(exists, isDir) { - if (exists) - this._pushMatch(m, index) - }) - } +Glob.prototype.emitMatch = function (m) { + this.log('emitMatch', m) + this._emitQueue.push(m) + this._processEmitQueue() } Glob.prototype._processEmitQueue = function (m) { + this.log("pEQ paused=%j processing=%j m=%j", this.paused, + this._processingEmitQueue, m) + var done = false while (!this._processingEmitQueue && !this.paused) { this._processingEmitQueue = true var m = this._emitQueue.shift() + this.log(">processEmitQueue", m === this.EOF ? ":EOF:" : m) if (!m) { + this.log(">processEmitQueue, falsey m") this._processingEmitQueue = false break } - this.log('emit!', m === this.EOF ? "end" : "match") + if (m === this.EOF || !(this.mark && !this.stat)) { + this.log("peq: unmarked, or eof") + next.call(this, 0, false) + } else if (this.statCache[m]) { + var sc = this.statCache[m] + var exists + if (sc) + exists = sc.isDirectory() ? 2 : 1 + this.log("peq: stat cached") + next.call(this, exists, exists === 2) + } else { + this.log("peq: _stat, then next") + this._stat(m, next) + } - this.emit(m === this.EOF ? "end" : "match", m) - this._processingEmitQueue = false + function next(exists, isDir) { + this.log("next", m, exists, isDir) + var ev = m === this.EOF ? "end" : "match" + + // "end" can only happen once. 
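+      // `_endEmitted` is flipped the first time "end" is emitted (just below),
+      // so this assert fails fast if the emit queue ever tries to emit it twice.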
+ assert(!this._endEmitted) + if (ev === "end") + this._endEmitted = true + + if (exists) { + // Doesn't mean it necessarily doesn't exist, it's possible + // we just didn't check because we don't care that much, or + // this is EOF anyway. + if (isDir && !m.match(/\/$/)) { + m = m + "/" + } else if (!isDir && m.match(/\/$/)) { + m = m.replace(/\/+$/, "") + } + } + this.log("emit", ev, m) + this.emit(ev, m) + this._processingEmitQueue = false + if (done && m !== this.EOF && !this.paused) + this._processEmitQueue() + } } + done = true } Glob.prototype._process = function (pattern, depth, index, cb_) { @@ -367,7 +401,9 @@ Glob.prototype._process = function (pattern, depth, index, cb_) { if (process.platform === "win32") prefix = prefix.replace(/\\/g, "/") - this.emitMatch(prefix, index) + this.matches[index] = this.matches[index] || {} + this.matches[index][prefix] = true + this.emitMatch(prefix) } return cb() }) @@ -481,7 +517,9 @@ Glob.prototype._process = function (pattern, depth, index, cb_) { if (process.platform === "win32") e = e.replace(/\\/g, "/") - this.emitMatch(e, index) + this.matches[index] = this.matches[index] || {} + this.matches[index][e] = true + this.emitMatch(e) }, this) return cb.call(this) } diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json new file mode 100644 index 000000000..ce264faaf --- /dev/null +++ b/node_modules/glob/package.json @@ -0,0 +1,61 @@ +{ + "_from": "glob@3.x", + "_id": "glob@3.2.11", + "_inBundle": false, + "_integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", + "_location": "/glob", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "glob@3.x", + "name": "glob", + "escapedName": "glob", + "rawSpec": "3.x", + "saveSpec": null, + "fetchSpec": "3.x" + }, + "_requiredBy": [ + "/express3-handlebars" + ], + "_resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "_shasum": "4a973f635b9190f715d10987d5c00fd2815ebe3d", + "_spec": "glob@3.x", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express3-handlebars", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/node-glob/issues" + }, + "bundleDependencies": false, + "dependencies": { + "inherits": "2", + "minimatch": "0.3" + }, + "deprecated": false, + "description": "a little globber", + "devDependencies": { + "mkdirp": "0", + "rimraf": "1", + "tap": "~0.4.0" + }, + "engines": { + "node": "*" + }, + "homepage": "https://github.com/isaacs/node-glob#readme", + "license": "BSD", + "main": "glob.js", + "name": "glob", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "scripts": { + "test": "tap test/*.js", + "test-regen": "TEST_REGEN=1 node test/00-setup.js" + }, + "version": "3.2.11" +} diff --git a/node_modules/express3-handlebars/node_modules/glob/test/00-setup.js b/node_modules/glob/test/00-setup.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/00-setup.js rename to node_modules/glob/test/00-setup.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/bash-comparison.js b/node_modules/glob/test/bash-comparison.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/bash-comparison.js rename to node_modules/glob/test/bash-comparison.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/bash-results.json b/node_modules/glob/test/bash-results.json similarity index 99% rename from node_modules/express3-handlebars/node_modules/glob/test/bash-results.json rename to node_modules/glob/test/bash-results.json index 593215cd9..8051c7238 100644 --- a/node_modules/express3-handlebars/node_modules/glob/test/bash-results.json +++ b/node_modules/glob/test/bash-results.json @@ -298,6 +298,7 @@ "./test/new-glob-optional-options.js", "./test/nocase-nomagic.js", "./test/pause-resume.js", + "./test/readme-issue.js", "./test/root-nomount.js", "./test/root.js", "./test/stat.js", diff --git a/node_modules/express3-handlebars/node_modules/glob/test/cwd-test.js b/node_modules/glob/test/cwd-test.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/cwd-test.js rename to node_modules/glob/test/cwd-test.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/globstar-match.js b/node_modules/glob/test/globstar-match.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/globstar-match.js rename to node_modules/glob/test/globstar-match.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/mark.js b/node_modules/glob/test/mark.js similarity index 73% rename from node_modules/express3-handlebars/node_modules/glob/test/mark.js rename to node_modules/glob/test/mark.js index e74282073..bf411c0e5 100644 --- a/node_modules/express3-handlebars/node_modules/glob/test/mark.js +++ b/node_modules/glob/test/mark.js @@ -2,6 +2,42 @@ var test = require("tap").test var glob = require('../') process.chdir(__dirname) +// expose timing issues +var lag = 5 +glob.Glob.prototype._stat = function(o) { return function(f, cb) { + var args = arguments + setTimeout(function() { + o.call(this, f, cb) + }.bind(this), lag += 5) +}}(glob.Glob.prototype._stat) + + +test("mark, with **", function (t) { + glob("a/*b*/**", {mark: true}, function (er, results) { + if (er) + throw er + var expect = + [ 'a/abcdef/', + 'a/abcdef/g/', + 'a/abcdef/g/h', + 'a/abcfed/', + 'a/abcfed/g/', + 'a/abcfed/g/h', + 'a/b/', + 'a/b/c/', + 'a/b/c/d', + 'a/bc/', + 'a/bc/e/', + 'a/bc/e/f', + 'a/cb/', + 
'a/cb/e/', + 'a/cb/e/f' ] + + t.same(results, expect) + t.end() + }) +}) + test("mark, no / on pattern", function (t) { glob("a/*", {mark: true}, function (er, results) { if (er) diff --git a/node_modules/express3-handlebars/node_modules/glob/test/new-glob-optional-options.js b/node_modules/glob/test/new-glob-optional-options.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/new-glob-optional-options.js rename to node_modules/glob/test/new-glob-optional-options.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/nocase-nomagic.js b/node_modules/glob/test/nocase-nomagic.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/nocase-nomagic.js rename to node_modules/glob/test/nocase-nomagic.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/pause-resume.js b/node_modules/glob/test/pause-resume.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/pause-resume.js rename to node_modules/glob/test/pause-resume.js diff --git a/node_modules/glob/test/readme-issue.js b/node_modules/glob/test/readme-issue.js new file mode 100644 index 000000000..0b4e0be29 --- /dev/null +++ b/node_modules/glob/test/readme-issue.js @@ -0,0 +1,36 @@ +var test = require("tap").test +var glob = require("../") + +var mkdirp = require("mkdirp") +var fs = require("fs") +var rimraf = require("rimraf") +var dir = __dirname + "/package" + +test("setup", function (t) { + mkdirp.sync(dir) + fs.writeFileSync(dir + "/package.json", "{}", "ascii") + fs.writeFileSync(dir + "/README", "x", "ascii") + t.pass("setup done") + t.end() +}) + +test("glob", function (t) { + var opt = { + cwd: dir, + nocase: true, + mark: true + } + + glob("README?(.*)", opt, function (er, files) { + if (er) + throw er + t.same(files, ["README"]) + t.end() + }) +}) + +test("cleanup", function (t) { + rimraf.sync(dir) + t.pass("clean") + t.end() +}) diff --git a/node_modules/express3-handlebars/node_modules/glob/test/root-nomount.js b/node_modules/glob/test/root-nomount.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/root-nomount.js rename to node_modules/glob/test/root-nomount.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/root.js b/node_modules/glob/test/root.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/root.js rename to node_modules/glob/test/root.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/stat.js b/node_modules/glob/test/stat.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/stat.js rename to node_modules/glob/test/stat.js diff --git a/node_modules/express3-handlebars/node_modules/glob/test/zz-cleanup.js b/node_modules/glob/test/zz-cleanup.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/test/zz-cleanup.js rename to node_modules/glob/test/zz-cleanup.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/.npmignore b/node_modules/handlebars/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/.npmignore rename to node_modules/handlebars/.npmignore diff --git a/node_modules/express3-handlebars/node_modules/handlebars/LICENSE b/node_modules/handlebars/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/LICENSE rename to node_modules/handlebars/LICENSE diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/README.markdown b/node_modules/handlebars/README.markdown similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/README.markdown rename to node_modules/handlebars/README.markdown diff --git a/node_modules/express3-handlebars/node_modules/handlebars/bin/handlebars b/node_modules/handlebars/bin/handlebars old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/bin/handlebars rename to node_modules/handlebars/bin/handlebars diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars.js b/node_modules/handlebars/dist/amd/handlebars.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars.js rename to node_modules/handlebars/dist/amd/handlebars.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars.runtime.js b/node_modules/handlebars/dist/amd/handlebars.runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars.runtime.js rename to node_modules/handlebars/dist/amd/handlebars.runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/base.js b/node_modules/handlebars/dist/amd/handlebars/base.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/base.js rename to node_modules/handlebars/dist/amd/handlebars/base.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/ast.js b/node_modules/handlebars/dist/amd/handlebars/compiler/ast.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/ast.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/ast.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/base.js b/node_modules/handlebars/dist/amd/handlebars/compiler/base.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/base.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/base.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/compiler.js b/node_modules/handlebars/dist/amd/handlebars/compiler/compiler.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/compiler.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/compiler.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/javascript-compiler.js b/node_modules/handlebars/dist/amd/handlebars/compiler/javascript-compiler.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/javascript-compiler.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/javascript-compiler.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/parser.js b/node_modules/handlebars/dist/amd/handlebars/compiler/parser.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/parser.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/parser.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/printer.js b/node_modules/handlebars/dist/amd/handlebars/compiler/printer.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/printer.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/printer.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/visitor.js b/node_modules/handlebars/dist/amd/handlebars/compiler/visitor.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/compiler/visitor.js rename to node_modules/handlebars/dist/amd/handlebars/compiler/visitor.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/exception.js b/node_modules/handlebars/dist/amd/handlebars/exception.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/exception.js rename to node_modules/handlebars/dist/amd/handlebars/exception.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/runtime.js b/node_modules/handlebars/dist/amd/handlebars/runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/runtime.js rename to node_modules/handlebars/dist/amd/handlebars/runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/safe-string.js b/node_modules/handlebars/dist/amd/handlebars/safe-string.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/safe-string.js rename to node_modules/handlebars/dist/amd/handlebars/safe-string.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/utils.js b/node_modules/handlebars/dist/amd/handlebars/utils.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/amd/handlebars/utils.js rename to node_modules/handlebars/dist/amd/handlebars/utils.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars.js b/node_modules/handlebars/dist/cjs/handlebars.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars.js rename to node_modules/handlebars/dist/cjs/handlebars.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars.runtime.js b/node_modules/handlebars/dist/cjs/handlebars.runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars.runtime.js rename to node_modules/handlebars/dist/cjs/handlebars.runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/base.js b/node_modules/handlebars/dist/cjs/handlebars/base.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/base.js rename to node_modules/handlebars/dist/cjs/handlebars/base.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/ast.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/ast.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/ast.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/ast.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/base.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/base.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/base.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/base.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/compiler.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/compiler.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/compiler.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/compiler.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/javascript-compiler.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/javascript-compiler.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/javascript-compiler.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/javascript-compiler.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/parser.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/parser.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/parser.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/parser.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/printer.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/printer.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/printer.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/printer.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/visitor.js b/node_modules/handlebars/dist/cjs/handlebars/compiler/visitor.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/compiler/visitor.js rename to node_modules/handlebars/dist/cjs/handlebars/compiler/visitor.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/exception.js b/node_modules/handlebars/dist/cjs/handlebars/exception.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/exception.js rename to node_modules/handlebars/dist/cjs/handlebars/exception.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/runtime.js b/node_modules/handlebars/dist/cjs/handlebars/runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/runtime.js rename to node_modules/handlebars/dist/cjs/handlebars/runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/safe-string.js b/node_modules/handlebars/dist/cjs/handlebars/safe-string.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/safe-string.js rename to node_modules/handlebars/dist/cjs/handlebars/safe-string.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/utils.js b/node_modules/handlebars/dist/cjs/handlebars/utils.js similarity index 100% rename 
from node_modules/express3-handlebars/node_modules/handlebars/dist/cjs/handlebars/utils.js rename to node_modules/handlebars/dist/cjs/handlebars/utils.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.amd.js b/node_modules/handlebars/dist/handlebars.amd.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.amd.js rename to node_modules/handlebars/dist/handlebars.amd.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.amd.min.js b/node_modules/handlebars/dist/handlebars.amd.min.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.amd.min.js rename to node_modules/handlebars/dist/handlebars.amd.min.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.js b/node_modules/handlebars/dist/handlebars.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.js rename to node_modules/handlebars/dist/handlebars.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.min.js b/node_modules/handlebars/dist/handlebars.min.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.min.js rename to node_modules/handlebars/dist/handlebars.min.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.amd.js b/node_modules/handlebars/dist/handlebars.runtime.amd.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.amd.js rename to node_modules/handlebars/dist/handlebars.runtime.amd.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.amd.min.js b/node_modules/handlebars/dist/handlebars.runtime.amd.min.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.amd.min.js rename to node_modules/handlebars/dist/handlebars.runtime.amd.min.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.js b/node_modules/handlebars/dist/handlebars.runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.js rename to node_modules/handlebars/dist/handlebars.runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.min.js b/node_modules/handlebars/dist/handlebars.runtime.min.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/dist/handlebars.runtime.min.js rename to node_modules/handlebars/dist/handlebars.runtime.min.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars.js b/node_modules/handlebars/lib/handlebars.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars.js rename to node_modules/handlebars/lib/handlebars.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars.runtime.js b/node_modules/handlebars/lib/handlebars.runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars.runtime.js rename to node_modules/handlebars/lib/handlebars.runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/base.js b/node_modules/handlebars/lib/handlebars/base.js 
similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/base.js rename to node_modules/handlebars/lib/handlebars/base.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/ast.js b/node_modules/handlebars/lib/handlebars/compiler/ast.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/ast.js rename to node_modules/handlebars/lib/handlebars/compiler/ast.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/base.js b/node_modules/handlebars/lib/handlebars/compiler/base.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/base.js rename to node_modules/handlebars/lib/handlebars/compiler/base.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/compiler.js b/node_modules/handlebars/lib/handlebars/compiler/compiler.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/compiler.js rename to node_modules/handlebars/lib/handlebars/compiler/compiler.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/javascript-compiler.js b/node_modules/handlebars/lib/handlebars/compiler/javascript-compiler.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/javascript-compiler.js rename to node_modules/handlebars/lib/handlebars/compiler/javascript-compiler.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/parser.js b/node_modules/handlebars/lib/handlebars/compiler/parser.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/parser.js rename to node_modules/handlebars/lib/handlebars/compiler/parser.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/printer.js b/node_modules/handlebars/lib/handlebars/compiler/printer.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/printer.js rename to node_modules/handlebars/lib/handlebars/compiler/printer.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/visitor.js b/node_modules/handlebars/lib/handlebars/compiler/visitor.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/compiler/visitor.js rename to node_modules/handlebars/lib/handlebars/compiler/visitor.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/exception.js b/node_modules/handlebars/lib/handlebars/exception.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/exception.js rename to node_modules/handlebars/lib/handlebars/exception.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/runtime.js b/node_modules/handlebars/lib/handlebars/runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/runtime.js rename to node_modules/handlebars/lib/handlebars/runtime.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/safe-string.js b/node_modules/handlebars/lib/handlebars/safe-string.js similarity index 100% 
rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/safe-string.js rename to node_modules/handlebars/lib/handlebars/safe-string.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/utils.js b/node_modules/handlebars/lib/handlebars/utils.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/handlebars/utils.js rename to node_modules/handlebars/lib/handlebars/utils.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/lib/index.js b/node_modules/handlebars/lib/index.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/lib/index.js rename to node_modules/handlebars/lib/index.js diff --git a/node_modules/handlebars/package.json b/node_modules/handlebars/package.json new file mode 100644 index 000000000..4903c8e13 --- /dev/null +++ b/node_modules/handlebars/package.json @@ -0,0 +1,91 @@ +{ + "_from": "handlebars@1.x", + "_id": "handlebars@1.3.0", + "_inBundle": false, + "_integrity": "sha1-npsTCpPjiUkTItl1zz7BgYw3zjQ=", + "_location": "/handlebars", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "handlebars@1.x", + "name": "handlebars", + "escapedName": "handlebars", + "rawSpec": "1.x", + "saveSpec": null, + "fetchSpec": "1.x" + }, + "_requiredBy": [ + "/express3-handlebars" + ], + "_resolved": "https://registry.npmjs.org/handlebars/-/handlebars-1.3.0.tgz", + "_shasum": "9e9b130a93e389491322d975cf3ec1818c37ce34", + "_spec": "handlebars@1.x", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express3-handlebars", + "author": { + "name": "Yehuda Katz" + }, + "barename": "handlebars", + "bin": { + "handlebars": "bin/handlebars" + }, + "bugs": { + "url": "https://github.com/wycats/handlebars.js/issues" + }, + "bundleDependencies": false, + "dependencies": { + "optimist": "~0.3", + "uglify-js": "~2.3" + }, + "deprecated": false, + "description": "Handlebars provides the power necessary to let you build semantic templates effectively with no frustration", + "devDependencies": { + "async": "~0.2.9", + "aws-sdk": "~1.5.0", + "benchmark": "~1.0", + "dustjs-linkedin": "~2.0.2", + "eco": "~1.1.0-rc-3", + "es6-module-packager": "0.x", + "grunt": "~0.4.1", + "grunt-cli": "~0.1.10", + "grunt-contrib-clean": "~0.4.1", + "grunt-contrib-concat": "~0.3.0", + "grunt-contrib-connect": "~0.5.0", + "grunt-contrib-copy": "~0.4.1", + "grunt-contrib-jshint": "0.x", + "grunt-contrib-requirejs": "~0.4.1", + "grunt-contrib-uglify": "~0.2.2", + "grunt-contrib-watch": "~0.5.3", + "grunt-saucelabs": "~4.1.2", + "jison": "~0.3.0", + "keen.io": "0.0.3", + "mocha": "*", + "mustache": "~0.7.2", + "semver": "~2.1.0", + "underscore": "~1.5.1" + }, + "engines": { + "node": ">=0.4.7" + }, + "homepage": "http://www.handlebarsjs.com/", + "keywords": [ + "handlebars", + "mustache", + "template", + "html" + ], + "license": "MIT", + "main": "lib/index.js", + "name": "handlebars", + "optionalDependencies": { + "uglify-js": "~2.3" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/wycats/handlebars.js.git" + }, + "scripts": { + "test": "grunt" + }, + "version": "1.3.0" +} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/release-notes.md b/node_modules/handlebars/release-notes.md similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/release-notes.md rename to node_modules/handlebars/release-notes.md diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/runtime.js b/node_modules/handlebars/runtime.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/runtime.js rename to node_modules/handlebars/runtime.js diff --git a/node_modules/mongoose/node_modules/hooks/.npmignore b/node_modules/hooks/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/hooks/.npmignore rename to node_modules/hooks/.npmignore diff --git a/node_modules/mongoose/node_modules/hooks/Makefile b/node_modules/hooks/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/hooks/Makefile rename to node_modules/hooks/Makefile diff --git a/node_modules/mongoose/node_modules/hooks/README.md b/node_modules/hooks/README.md similarity index 100% rename from node_modules/mongoose/node_modules/hooks/README.md rename to node_modules/hooks/README.md diff --git a/node_modules/mongoose/node_modules/hooks/hooks.alt.js b/node_modules/hooks/hooks.alt.js similarity index 100% rename from node_modules/mongoose/node_modules/hooks/hooks.alt.js rename to node_modules/hooks/hooks.alt.js diff --git a/node_modules/mongoose/node_modules/hooks/hooks.js b/node_modules/hooks/hooks.js similarity index 100% rename from node_modules/mongoose/node_modules/hooks/hooks.js rename to node_modules/hooks/hooks.js diff --git a/node_modules/hooks/package.json b/node_modules/hooks/package.json new file mode 100644 index 000000000..c6f7a74ba --- /dev/null +++ b/node_modules/hooks/package.json @@ -0,0 +1,70 @@ +{ + "_from": "hooks@0.2.1", + "_id": "hooks@0.2.1", + "_inBundle": false, + "_integrity": "sha1-D1kbGzRL3LPfWXc/Yvu6+Fv0Aos=", + "_location": "/hooks", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "hooks@0.2.1", + "name": "hooks", + "escapedName": "hooks", + "rawSpec": "0.2.1", + "saveSpec": null, + "fetchSpec": "0.2.1" + }, + "_requiredBy": [ + "/mongoose" + ], + "_resolved": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz", + "_shasum": "0f591b1b344bdcb3df59773f62fbbaf85bf4028b", + "_spec": "hooks@0.2.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Brian Noguchi", + "email": "brian.noguchi@gmail.com", + "url": "https://github.com/bnoguchi/" + }, + "bugs": { + "url": "https://github.com/bnoguchi/hooks-js/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Adds pre and post hook functionality to your JavaScript methods.", + "devDependencies": { + "expresso": ">=0.7.6", + "should": ">=0.2.1", + "underscore": ">=1.1.4" + }, + "directories": { + "lib": "." 
+ }, + "engines": { + "node": ">=0.4.0" + }, + "homepage": "https://github.com/bnoguchi/hooks-js/", + "keywords": [ + "node", + "hooks", + "middleware", + "pre", + "post" + ], + "licenses": [ + "MIT" + ], + "main": "./hooks.js", + "name": "hooks", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "git://github.com/bnoguchi/hooks-js.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.2.1" +} diff --git a/node_modules/mongoose/node_modules/hooks/test.js b/node_modules/hooks/test.js similarity index 100% rename from node_modules/mongoose/node_modules/hooks/test.js rename to node_modules/hooks/test.js diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/LICENSE rename to node_modules/inherits/LICENSE diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/README.md b/node_modules/inherits/README.md similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/inherits/README.md rename to node_modules/inherits/README.md diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js new file mode 100644 index 000000000..f71f2d932 --- /dev/null +++ b/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js new file mode 100644 index 000000000..86bbb3dc2 --- /dev/null +++ b/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json new file mode 100644 index 000000000..7e62ceaa3 --- /dev/null +++ b/node_modules/inherits/package.json @@ -0,0 +1,62 @@ +{ + "_from": "inherits@~2.0.1", + "_id": "inherits@2.0.4", + "_inBundle": false, + "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "_location": "/inherits", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "inherits@~2.0.1", + "name": "inherits", + "escapedName": "inherits", + "rawSpec": "~2.0.1", + "saveSpec": null, + "fetchSpec": "~2.0.1" + }, + "_requiredBy": [ + "/glob", + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", + "_spec": "inherits@~2.0.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/readable-stream", + "browser": 
"./inherits_browser.js", + "bugs": { + "url": "https://github.com/isaacs/inherits/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ], + "homepage": "https://github.com/isaacs/inherits#readme", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "license": "ISC", + "main": "./inherits.js", + "name": "inherits", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/inherits.git" + }, + "scripts": { + "test": "tap" + }, + "version": "2.0.4" +} diff --git a/node_modules/isarray/README.md b/node_modules/isarray/README.md new file mode 100644 index 000000000..052a62b8d --- /dev/null +++ b/node_modules/isarray/README.md @@ -0,0 +1,54 @@ + +# isarray + +`Array#isArray` for older browsers. + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/isarray/build/build.js b/node_modules/isarray/build/build.js new file mode 100644 index 000000000..ec58596ae --- /dev/null +++ b/node_modules/isarray/build/build.js @@ -0,0 +1,209 @@ + +/** + * Require the given path. 
+ * + * @param {String} path + * @return {Object} exports + * @api public + */ + +function require(path, parent, orig) { + var resolved = require.resolve(path); + + // lookup failed + if (null == resolved) { + orig = orig || path; + parent = parent || 'root'; + var err = new Error('Failed to require "' + orig + '" from "' + parent + '"'); + err.path = orig; + err.parent = parent; + err.require = true; + throw err; + } + + var module = require.modules[resolved]; + + // perform real require() + // by invoking the module's + // registered function + if (!module.exports) { + module.exports = {}; + module.client = module.component = true; + module.call(this, module.exports, require.relative(resolved), module); + } + + return module.exports; +} + +/** + * Registered modules. + */ + +require.modules = {}; + +/** + * Registered aliases. + */ + +require.aliases = {}; + +/** + * Resolve `path`. + * + * Lookup: + * + * - PATH/index.js + * - PATH.js + * - PATH + * + * @param {String} path + * @return {String} path or null + * @api private + */ + +require.resolve = function(path) { + if (path.charAt(0) === '/') path = path.slice(1); + var index = path + '/index.js'; + + var paths = [ + path, + path + '.js', + path + '.json', + path + '/index.js', + path + '/index.json' + ]; + + for (var i = 0; i < paths.length; i++) { + var path = paths[i]; + if (require.modules.hasOwnProperty(path)) return path; + } + + if (require.aliases.hasOwnProperty(index)) { + return require.aliases[index]; + } +}; + +/** + * Normalize `path` relative to the current path. + * + * @param {String} curr + * @param {String} path + * @return {String} + * @api private + */ + +require.normalize = function(curr, path) { + var segs = []; + + if ('.' != path.charAt(0)) return path; + + curr = curr.split('/'); + path = path.split('/'); + + for (var i = 0; i < path.length; ++i) { + if ('..' == path[i]) { + curr.pop(); + } else if ('.' != path[i] && '' != path[i]) { + segs.push(path[i]); + } + } + + return curr.concat(segs).join('/'); +}; + +/** + * Register module at `path` with callback `definition`. + * + * @param {String} path + * @param {Function} definition + * @api private + */ + +require.register = function(path, definition) { + require.modules[path] = definition; +}; + +/** + * Alias a module definition. + * + * @param {String} from + * @param {String} to + * @api private + */ + +require.alias = function(from, to) { + if (!require.modules.hasOwnProperty(from)) { + throw new Error('Failed to alias "' + from + '", it does not exist'); + } + require.aliases[to] = from; +}; + +/** + * Return a require function relative to the `parent` path. + * + * @param {String} parent + * @return {Function} + * @api private + */ + +require.relative = function(parent) { + var p = require.normalize(parent, '..'); + + /** + * lastIndexOf helper. + */ + + function lastIndexOf(arr, obj) { + var i = arr.length; + while (i--) { + if (arr[i] === obj) return i; + } + return -1; + } + + /** + * The relative require() itself. + */ + + function localRequire(path) { + var resolved = localRequire.resolve(path); + return require(resolved, parent, path); + } + + /** + * Resolve relative to the parent. + */ + + localRequire.resolve = function(path) { + var c = path.charAt(0); + if ('/' == c) return path.slice(1); + if ('.' 
== c) return require.normalize(p, path); + + // resolve deps by returning + // the dep in the nearest "deps" + // directory + var segs = parent.split('/'); + var i = lastIndexOf(segs, 'deps') + 1; + if (!i) i = 0; + path = segs.slice(0, i + 1).join('/') + '/deps/' + path; + return path; + }; + + /** + * Check if module is defined at `path`. + */ + + localRequire.exists = function(path) { + return require.modules.hasOwnProperty(localRequire.resolve(path)); + }; + + return localRequire; +}; +require.register("isarray/index.js", function(exports, require, module){ +module.exports = Array.isArray || function (arr) { + return Object.prototype.toString.call(arr) == '[object Array]'; +}; + +}); +require.alias("isarray/index.js", "isarray/index.js"); + diff --git a/node_modules/isarray/component.json b/node_modules/isarray/component.json new file mode 100644 index 000000000..9e31b6838 --- /dev/null +++ b/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/isarray/index.js b/node_modules/isarray/index.js new file mode 100644 index 000000000..5f5ad45d4 --- /dev/null +++ b/node_modules/isarray/index.js @@ -0,0 +1,3 @@ +module.exports = Array.isArray || function (arr) { + return Object.prototype.toString.call(arr) == '[object Array]'; +}; diff --git a/node_modules/isarray/package.json b/node_modules/isarray/package.json new file mode 100644 index 000000000..72f6ca96d --- /dev/null +++ b/node_modules/isarray/package.json @@ -0,0 +1,57 @@ +{ + "_from": "isarray@0.0.1", + "_id": "isarray@0.0.1", + "_inBundle": false, + "_integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "_location": "/isarray", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "isarray@0.0.1", + "name": "isarray", + "escapedName": "isarray", + "rawSpec": "0.0.1", + "saveSpec": null, + "fetchSpec": "0.0.1" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf", + "_spec": "isarray@0.0.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/readable-stream", + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "bugs": { + "url": "https://github.com/juliangruber/isarray/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Array#isArray for older browsers", + "devDependencies": { + "tap": "*" + }, + "homepage": "https://github.com/juliangruber/isarray", + "keywords": [ + "browser", + "isarray", + "array" + ], + "license": "MIT", + "main": "index.js", + "name": "isarray", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "0.0.1" +} diff --git a/node_modules/mongodb/node_modules/kerberos/LICENSE b/node_modules/kerberos/LICENSE similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/LICENSE rename to node_modules/kerberos/LICENSE diff 
--git a/node_modules/mongodb/node_modules/kerberos/README.md b/node_modules/kerberos/README.md similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/README.md rename to node_modules/kerberos/README.md diff --git a/node_modules/mongodb/node_modules/kerberos/binding.gyp b/node_modules/kerberos/binding.gyp similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/binding.gyp rename to node_modules/kerberos/binding.gyp diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Makefile b/node_modules/kerberos/build/Makefile similarity index 76% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Makefile rename to node_modules/kerberos/build/Makefile index 901981cc7..9f3d95fee 100644 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Makefile +++ b/node_modules/kerberos/build/Makefile @@ -41,30 +41,22 @@ all_deps := CC.target ?= $(CC) -CFLAGS.target ?= $(CFLAGS) +CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CXXFLAGS) +CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) LINK.target ?= $(LINK) LDFLAGS.target ?= $(LDFLAGS) AR.target ?= $(AR) # C++ apps need to be linked with g++. -# -# Note: flock is used to seralize linking. Linking is a memory-intensive -# process so running parallel links can often lead to thrashing. To disable -# the serialization, override LINK via an envrionment variable as follows: -# -# export LINK=g++ -# -# This will allow make to invoke N linker processes as specified in -jN. -LINK ?= flock $(builddir)/linker.lock $(CXX.target) +LINK ?= $(CXX.target) # TODO(evan): move all cross-compilation logic to gyp-time so we don't need # to replicate this environment fallback in make as well. CC.host ?= gcc -CFLAGS.host ?= +CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) CXX.host ?= g++ -CXXFLAGS.host ?= +CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) LINK.host ?= $(CXX.host) LDFLAGS.host ?= AR.host ?= ar @@ -134,6 +126,34 @@ cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $ quiet_cmd_cxx = CXX($(TOOLSET)) $@ cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_objc = CXX($(TOOLSET)) $@ +cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< + +quiet_cmd_objcxx = CXX($(TOOLSET)) $@ +cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# Commands for precompiled header files. +quiet_cmd_pch_c = CXX($(TOOLSET)) $@ +cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ +cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_m = CXX($(TOOLSET)) $@ +cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< +quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ +cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# gyp-mac-tool is written next to the root Makefile by gyp. +# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd +# already. 
+quiet_cmd_mac_tool = MACTOOL $(4) $< +cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" + +quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ +cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) + +quiet_cmd_infoplist = INFOPLIST $@ +cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" + quiet_cmd_touch = TOUCH $@ cmd_touch = touch $@ @@ -141,39 +161,17 @@ quiet_cmd_copy = COPY $@ # send stderr to /dev/null to ignore messages when linking directories. cmd_copy = rm -rf "$@" && cp -af "$<" "$@" -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) +quiet_cmd_alink = LIBTOOL-STATIC $@ +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) # Define an escape_quotes function to escape single quotes. 
@@ -238,7 +236,7 @@ define do_cmd $(if $(or $(command_changed),$(prereq_changed)), @$(call exact_echo, $($(quiet)cmd_$(1))) @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 1,$(cmd_$1))), + $(if $(findstring flock,$(word 2,$(cmd_$1))), @$(cmd_$(1)) @echo " $(quiet_cmd_$(1)): Finished", @$(cmd_$(1)) @@ -276,6 +274,10 @@ $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD @@ -290,6 +292,10 @@ $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD @@ -303,6 +309,10 @@ $(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD @@ -316,8 +326,8 @@ ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ endif quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/home/vagrant/lab4/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/home/vagrant/.node-gyp/0.10.24/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/home/vagrant/.node-gyp/0.10.24" "-Dmodule_root_dir=/home/vagrant/lab4/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos" binding.gyp -Makefile: $(srcdir)/../../../../../../../../../.node-gyp/0.10.24/common.gypi $(srcdir)/../../../../../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp +cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/kerberos/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/common.gypi "--depth=." "-Goutput_dir=." 
"--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/tamtranht02/Library/Caches/node-gyp/14.15.4" "-Dnode_gyp_dir=/usr/local/lib/node_modules/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/<(target_arch)/node.lib" "-Dmodule_root_dir=/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/kerberos" "-Dnode_engine=v8" binding.gyp +Makefile: $(srcdir)/../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/../../../../../Library/Caches/node-gyp/14.15.4/include/node/common.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(call do_cmd,regen_makefile) # "all" is a concatenation of the "all" targets from all the included diff --git a/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos/lib/kerberos.o.d.raw b/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos/lib/kerberos.o.d.raw new file mode 100644 index 000000000..0e0ae2815 --- /dev/null +++ b/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos/lib/kerberos.o.d.raw @@ -0,0 +1,12 @@ +Release/obj.target/kerberos/lib/kerberos.o: ../lib/kerberos.cc \ + ../lib/kerberos.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/cppgc/common.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8config.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-internal.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-platform.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_object_wrap.h \ + ../lib/kerberosgss.h ../lib/worker.h ../lib/kerberos_context.h diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/binding.Makefile b/node_modules/kerberos/build/binding.Makefile similarity index 70% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/binding.Makefile rename to node_modules/kerberos/build/binding.Makefile index d0d9c64a7..69e964f59 100644 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/binding.Makefile +++ b/node_modules/kerberos/build/binding.Makefile @@ -1,6 +1,6 @@ # This file is generated by gyp; do not edit. -export builddir_name ?= build/./. +export builddir_name ?= ./build/. .PHONY: all all: $(MAKE) kerberos diff --git a/node_modules/kerberos/build/config.gypi b/node_modules/kerberos/build/config.gypi new file mode 100644 index 000000000..6a09d0a25 --- /dev/null +++ b/node_modules/kerberos/build/config.gypi @@ -0,0 +1,203 @@ +# Do not edit. 
File was generated by node-gyp's "configure" step +{ + "target_defaults": { + "cflags": [], + "default_configuration": "Release", + "defines": [], + "include_dirs": [], + "libraries": [] + }, + "variables": { + "asan": 0, + "build_v8_with_gn": "false", + "coverage": "false", + "dcheck_always_on": 0, + "debug_nghttp2": "false", + "debug_node": "false", + "enable_lto": "false", + "enable_pgo_generate": "false", + "enable_pgo_use": "false", + "error_on_warn": "false", + "force_dynamic_crt": 0, + "host_arch": "x64", + "icu_data_in": "../../deps/icu-tmp/icudt67l.dat", + "icu_endianness": "l", + "icu_gyp_path": "tools/icu/icu-generic.gyp", + "icu_path": "deps/icu-small", + "icu_small": "false", + "icu_ver_major": "67", + "is_debug": 0, + "llvm_version": "11.0", + "napi_build_version": "7", + "node_byteorder": "little", + "node_debug_lib": "false", + "node_enable_d8": "false", + "node_install_npm": "true", + "node_module_version": 83, + "node_no_browser_globals": "false", + "node_prefix": "/usr/local", + "node_release_urlbase": "https://nodejs.org/download/release/", + "node_shared": "false", + "node_shared_brotli": "false", + "node_shared_cares": "false", + "node_shared_http_parser": "false", + "node_shared_libuv": "false", + "node_shared_nghttp2": "false", + "node_shared_openssl": "false", + "node_shared_zlib": "false", + "node_tag": "", + "node_target_type": "executable", + "node_use_bundled_v8": "true", + "node_use_dtrace": "true", + "node_use_etw": "false", + "node_use_node_code_cache": "true", + "node_use_node_snapshot": "true", + "node_use_openssl": "true", + "node_use_v8_platform": "true", + "node_with_ltcg": "false", + "node_without_node_options": "false", + "openssl_fips": "", + "openssl_is_fips": "false", + "ossfuzz": "false", + "shlib_suffix": "83.dylib", + "target_arch": "x64", + "v8_enable_31bit_smis_on_64bit_arch": 0, + "v8_enable_gdbjit": 0, + "v8_enable_i18n_support": 1, + "v8_enable_inspector": 1, + "v8_enable_lite_mode": 0, + "v8_enable_object_print": 1, + "v8_enable_pointer_compression": 0, + "v8_no_strict_aliasing": 1, + "v8_optimized_debug": 1, + "v8_promise_internal_field_count": 1, + "v8_random_seed": 0, + "v8_trace_maps": 0, + "v8_use_siphash": 1, + "want_separate_host_toolset": 0, + "xcode_version": "11.0", + "nodedir": "/Users/tamtranht02/Library/Caches/node-gyp/14.15.4", + "standalone_static_library": 1, + "dry_run": "", + "legacy_bundling": "", + "save_dev": "", + "browser": "", + "commit_hooks": "true", + "only": "", + "viewer": "man", + "also": "", + "rollback": "true", + "sign_git_commit": "", + "audit": "true", + "usage": "", + "globalignorefile": "/usr/local/etc/npmignore", + "init_author_url": "", + "maxsockets": "50", + "shell": "/bin/bash", + "metrics_registry": "https://registry.npmjs.org/", + "parseable": "", + "shrinkwrap": "true", + "init_license": "ISC", + "timing": "", + "if_present": "", + "cache_max": "Infinity", + "init_author_email": "", + "sign_git_tag": "", + "cert": "", + "git_tag_version": "true", + "local_address": "", + "long": "", + "preid": "", + "fetch_retries": "2", + "registry": "https://registry.npmjs.org/", + "key": "", + "message": "%s", + "versions": "", + "globalconfig": "/usr/local/etc/npmrc", + "always_auth": "", + "logs_max": "10", + "prefer_online": "", + "cache_lock_retries": "10", + "global_style": "", + "update_notifier": "true", + "audit_level": "low", + "heading": "npm", + "fetch_retry_mintimeout": "10000", + "offline": "", + "read_only": "", + "searchlimit": "20", + "access": "", + "json": "", + "allow_same_version": "", + 
"description": "true", + "engine_strict": "", + "https_proxy": "", + "init_module": "/Users/tamtranht02/.npm-init.js", + "userconfig": "/Users/tamtranht02/.npmrc", + "cidr": "", + "node_version": "14.15.4", + "user": "", + "auth_type": "legacy", + "editor": "vi", + "ignore_prepublish": "", + "save": "true", + "script_shell": "", + "tag": "latest", + "before": "", + "global": "", + "progress": "true", + "ham_it_up": "", + "optional": "true", + "searchstaleness": "900", + "bin_links": "true", + "force": "", + "save_prod": "", + "searchopts": "", + "depth": "Infinity", + "node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js", + "rebuild_bundle": "true", + "sso_poll_frequency": "500", + "unicode": "true", + "fetch_retry_maxtimeout": "60000", + "ca": "", + "save_prefix": "^", + "scripts_prepend_node_path": "warn-only", + "sso_type": "oauth", + "strict_ssl": "true", + "tag_version_prefix": "v", + "dev": "", + "fetch_retry_factor": "10", + "group": "20", + "save_exact": "", + "cache_lock_stale": "60000", + "prefer_offline": "", + "version": "", + "cache_min": "10", + "otp": "", + "cache": "/Users/tamtranht02/.npm", + "searchexclude": "", + "color": "true", + "package_lock": "true", + "fund": "true", + "package_lock_only": "", + "save_optional": "", + "user_agent": "npm/6.14.10 node/v14.15.4 darwin x64", + "ignore_scripts": "", + "cache_lock_wait": "10000", + "production": "", + "save_bundle": "", + "send_metrics": "", + "init_version": "1.0.0", + "node_options": "", + "umask": "0022", + "scope": "", + "git": "git", + "init_author_name": "", + "onload_script": "", + "tmp": "/var/folders/gz/y49xvbws5fl_4hbmp6h870vh0000gn/T", + "unsafe_perm": "true", + "format_package_lock": "true", + "link": "", + "prefix": "/usr/local" + } +} diff --git a/node_modules/kerberos/build/gyp-mac-tool b/node_modules/kerberos/build/gyp-mac-tool new file mode 100755 index 000000000..033b4e538 --- /dev/null +++ b/node_modules/kerberos/build/gyp-mac-tool @@ -0,0 +1,615 @@ +#!/usr/bin/env python +# Generated by gyp. Do not edit. +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions to perform Xcode-style build steps. + +These functions are executed via gyp-mac-tool when using the Makefile generator. +""" + +from __future__ import print_function + +import fcntl +import fnmatch +import glob +import json +import os +import plistlib +import re +import shutil +import string +import subprocess +import sys +import tempfile + +PY3 = bytes != str + + +def main(args): + executor = MacTool() + exit_code = executor.Dispatch(args) + if exit_code is not None: + sys.exit(exit_code) + + +class MacTool(object): + """This class performs all the Mac tooling steps. 
The methods can either be + executed directly, or dispatched from an argument list.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + return getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace('-', '') + + def ExecCopyBundleResource(self, source, dest, convert_to_binary): + """Copies a resource file to the bundle/Resources directory, performing any + necessary compilation on each resource.""" + extension = os.path.splitext(source)[1].lower() + if os.path.isdir(source): + # Copy tree. + # TODO(thakis): This copies file attributes like mtime, while the + # single-file branch below doesn't. This should probably be changed to + # be consistent with the single-file branch. + if os.path.exists(dest): + shutil.rmtree(dest) + shutil.copytree(source, dest) + elif extension == '.xib': + return self._CopyXIBFile(source, dest) + elif extension == '.storyboard': + return self._CopyXIBFile(source, dest) + elif extension == '.strings': + self._CopyStringsFile(source, dest, convert_to_binary) + else: + shutil.copy(source, dest) + + def _CopyXIBFile(self, source, dest): + """Compiles a XIB file with ibtool into a binary plist in the bundle.""" + + # ibtool sometimes crashes with relative paths. See crbug.com/314728. + base = os.path.dirname(os.path.realpath(__file__)) + if os.path.relpath(source): + source = os.path.join(base, source) + if os.path.relpath(dest): + dest = os.path.join(base, dest) + + args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', + '--output-format', 'human-readable-text', '--compile', dest, source] + ibtool_section_re = re.compile(r'/\*.*\*/') + ibtool_re = re.compile(r'.*note:.*is clipping its content') + ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) + current_section_header = None + for line in ibtoolout.stdout: + if ibtool_section_re.match(line): + current_section_header = line + elif not ibtool_re.match(line): + if current_section_header: + sys.stdout.write(current_section_header) + current_section_header = None + sys.stdout.write(line) + return ibtoolout.returncode + + def _ConvertToBinary(self, dest): + subprocess.check_call([ + 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) + + def _CopyStringsFile(self, source, dest, convert_to_binary): + """Copies a .strings file using iconv to reconvert the input into UTF-16.""" + input_code = self._DetectInputEncoding(source) or "UTF-8" + + # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call + # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints + # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing + # semicolon in dictionary. + # on invalid files. Do the same kind of validation. + import CoreFoundation + s = open(source, 'rb').read() + d = CoreFoundation.CFDataCreate(None, s, len(s)) + _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) + if error: + return + + fp = open(dest, 'wb') + fp.write(s.decode(input_code).encode('UTF-16')) + fp.close() + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _DetectInputEncoding(self, file_name): + """Reads the first few bytes from file_name and tries to guess the text + encoding. 
Returns None as a guess if it can't detect it.""" + fp = open(file_name, 'rb') + try: + header = fp.read(3) + except Exception: + fp.close() + return None + fp.close() + if header.startswith("\xFE\xFF"): + return "UTF-16" + elif header.startswith("\xFF\xFE"): + return "UTF-16" + elif header.startswith("\xEF\xBB\xBF"): + return "UTF-8" + else: + return None + + def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): + """Copies the |source| Info.plist to the destination directory |dest|.""" + # Read the source Info.plist into memory. + fd = open(source, 'r') + lines = fd.read() + fd.close() + + # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). + plist = plistlib.readPlistFromString(lines) + if keys: + plist = dict(plist.items() + json.loads(keys[0]).items()) + lines = plistlib.writePlistToString(plist) + + # Go through all the environment variables and replace them as variables in + # the file. + IDENT_RE = re.compile(r'[/\s]') + for key in os.environ: + if key.startswith('_'): + continue + evar = '${%s}' % key + evalue = os.environ[key] + lines = string.replace(lines, evar, evalue) + + # Xcode supports various suffices on environment variables, which are + # all undocumented. :rfc1034identifier is used in the standard project + # template these days, and :identifier was used earlier. They are used to + # convert non-url characters into things that look like valid urls -- + # except that the replacement character for :identifier, '_' isn't valid + # in a URL either -- oops, hence :rfc1034identifier was born. + evar = '${%s:identifier}' % key + evalue = IDENT_RE.sub('_', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + evar = '${%s:rfc1034identifier}' % key + evalue = IDENT_RE.sub('-', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + # Remove any keys with values that haven't been replaced. + lines = lines.split('\n') + for i in range(len(lines)): + if lines[i].strip().startswith("${"): + lines[i] = None + lines[i - 1] = None + lines = '\n'.join(filter(lambda x: x is not None, lines)) + + # Write out the file with variables replaced. + fd = open(dest, 'w') + fd.write(lines) + fd.close() + + # Now write out PkgInfo file now that the Info.plist file has been + # "compiled". + self._WritePkgInfo(dest) + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _WritePkgInfo(self, info_plist): + """This writes the PkgInfo file from the data stored in Info.plist.""" + plist = plistlib.readPlist(info_plist) + if not plist: + return + + # Only create PkgInfo for executable types. + package_type = plist['CFBundlePackageType'] + if package_type != 'APPL': + return + + # The format of PkgInfo is eight characters, representing the bundle type + # and bundle signature, each four characters. If that is missing, four + # '?' characters are used instead. + signature_code = plist.get('CFBundleSignature', '????') + if len(signature_code) != 4: # Wrong length resets everything, too. + signature_code = '?' * 4 + + dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') + fp = open(dest, 'w') + fp.write('%s%s' % (package_type, signature_code)) + fp.close() + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. 
+ fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) + fcntl.flock(fd, fcntl.LOCK_EX) + return subprocess.call(cmd_list) + + def ExecFilterLibtool(self, *cmd_list): + """Calls libtool and filters out '/path/to/libtool: file: foo.o has no + symbols'.""" + libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') + libtool_re5 = re.compile( + r'^.*libtool: warning for library: ' + + r'.* the table of contents is empty ' + + r'\(no object file members in the library define global symbols\)$') + env = os.environ.copy() + # Ref: + # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c + # The problem with this flag is that it resets the file mtime on the file to + # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. + env['ZERO_AR_DATE'] = '1' + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + _, err = libtoolout.communicate() + if PY3: + err = err.decode('utf-8') + for line in err.splitlines(): + if not libtool_re.match(line) and not libtool_re5.match(line): + print(line, file=sys.stderr) + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky. + if not libtoolout.returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): + os.utime(cmd_list[i+1], None) + break + return libtoolout.returncode + + def ExecPackageFramework(self, framework, version): + """Takes a path to Something.framework and the Current version of that and + sets up all the symlinks.""" + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + + CURRENT = 'Current' + RESOURCES = 'Resources' + VERSIONS = 'Versions' + + if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): + # Binary-less frameworks don't seem to contain symlinks (see e.g. + # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). + return + + # Move into the framework directory to set the symlinks correctly. + pwd = os.getcwd() + os.chdir(framework) + + # Set up the Current version. + self._Relink(version, os.path.join(VERSIONS, CURRENT)) + + # Set up the root symlinks. + self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) + self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) + + # Back to where we were before! + os.chdir(pwd) + + def _Relink(self, dest, link): + """Creates a symlink to |dest| named |link|. If |link| already exists, + it is overwritten.""" + if os.path.lexists(link): + os.remove(link) + os.symlink(dest, link) + + def ExecCompileXcassets(self, keys, *inputs): + """Compiles multiple .xcassets files into a single .car file. + + This invokes 'actool' to compile all the inputs .xcassets files. The + |keys| arguments is a json-encoded dictionary of extra arguments to + pass to 'actool' when the asset catalogs contains an application icon + or a launch image. + + Note that 'actool' does not create the Assets.car file if the asset + catalogs does not contains imageset. 
+ """ + command_line = [ + 'xcrun', 'actool', '--output-format', 'human-readable-text', + '--compress-pngs', '--notices', '--warnings', '--errors', + ] + is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ + if is_iphone_target: + platform = os.environ['CONFIGURATION'].split('-')[-1] + if platform not in ('iphoneos', 'iphonesimulator'): + platform = 'iphonesimulator' + command_line.extend([ + '--platform', platform, '--target-device', 'iphone', + '--target-device', 'ipad', '--minimum-deployment-target', + os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', + os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), + ]) + else: + command_line.extend([ + '--platform', 'macosx', '--target-device', 'mac', + '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], + '--compile', + os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), + ]) + if keys: + keys = json.loads(keys) + for key, value in keys.items(): + arg_name = '--' + key + if isinstance(value, bool): + if value: + command_line.append(arg_name) + elif isinstance(value, list): + for v in value: + command_line.append(arg_name) + command_line.append(str(v)) + else: + command_line.append(arg_name) + command_line.append(str(value)) + # Note: actool crashes if inputs path are relative, so use os.path.abspath + # to get absolute path name for inputs. + command_line.extend(map(os.path.abspath, inputs)) + subprocess.check_call(command_line) + + def ExecMergeInfoPlist(self, output, *inputs): + """Merge multiple .plist files into a single .plist file.""" + merged_plist = {} + for path in inputs: + plist = self._LoadPlistMaybeBinary(path) + self._MergePlist(merged_plist, plist) + plistlib.writePlist(merged_plist, output) + + def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + """Code sign a bundle. + + This function tries to code sign an iOS bundle, following the same + algorithm as Xcode: + 1. copy ResourceRules.plist from the user or the SDK into the bundle, + 2. pick the provisioning profile that best match the bundle identifier, + and copy it into the bundle as embedded.mobileprovision, + 3. copy Entitlements.plist from user or SDK next to the bundle, + 4. code sign the bundle. + """ + resource_rules_path = self._InstallResourceRules(resource_rules) + substitutions, overrides = self._InstallProvisioningProfile( + provisioning, self._GetCFBundleIdentifier()) + entitlements_path = self._InstallEntitlements( + entitlements, substitutions, overrides) + subprocess.check_call([ + 'codesign', '--force', '--sign', key, '--resource-rules', + resource_rules_path, '--entitlements', entitlements_path, + os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['FULL_PRODUCT_NAME'])]) + + def _InstallResourceRules(self, resource_rules): + """Installs ResourceRules.plist from user or SDK into the bundle. + + Args: + resource_rules: string, optional, path to the ResourceRules.plist file + to use, default to "${SDKROOT}/ResourceRules.plist" + + Returns: + Path to the copy of ResourceRules.plist into the bundle. + """ + source_path = resource_rules + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'ResourceRules.plist') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], 'ResourceRules.plist') + shutil.copy2(source_path, target_path) + return target_path + + def _InstallProvisioningProfile(self, profile, bundle_identifier): + """Installs embedded.mobileprovision into the bundle. 
+ + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple containing two dictionary: variables substitutions and values + to overrides when generating the entitlements file. + """ + source_path, provisioning_data, team_id = self._FindProvisioningProfile( + profile, bundle_identifier) + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'embedded.mobileprovision') + shutil.copy2(source_path, target_path) + substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') + return substitutions, provisioning_data['Entitlements'] + + def _FindProvisioningProfile(self, profile, bundle_identifier): + """Finds the .mobileprovision file to use for signing the bundle. + + Checks all the installed provisioning profiles (or if the user specified + the PROVISIONING_PROFILE variable, only consult it) and select the most + specific that correspond to the bundle identifier. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple of the path to the selected provisioning profile, the data of + the embedded plist in the provisioning profile and the team identifier + to use for code signing. + + Raises: + SystemExit: if no .mobileprovision can be used to sign the bundle. + """ + profiles_dir = os.path.join( + os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') + if not os.path.isdir(profiles_dir): + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + sys.exit(1) + provisioning_profiles = None + if profile: + profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') + if os.path.exists(profile_path): + provisioning_profiles = [profile_path] + if not provisioning_profiles: + provisioning_profiles = glob.glob( + os.path.join(profiles_dir, '*.mobileprovision')) + valid_provisioning_profiles = {} + for profile_path in provisioning_profiles: + profile_data = self._LoadProvisioningProfile(profile_path) + app_id_pattern = profile_data.get( + 'Entitlements', {}).get('application-identifier', '') + for team_identifier in profile_data.get('TeamIdentifier', []): + app_id = '%s.%s' % (team_identifier, bundle_identifier) + if fnmatch.fnmatch(app_id, app_id_pattern): + valid_provisioning_profiles[app_id_pattern] = ( + profile_path, profile_data, team_identifier) + if not valid_provisioning_profiles: + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + sys.exit(1) + # If the user has multiple provisioning profiles installed that can be + # used for ${bundle_identifier}, pick the most specific one (ie. the + # provisioning profile whose pattern is the longest). + selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) + return valid_provisioning_profiles[selected_key] + + def _LoadProvisioningProfile(self, profile_path): + """Extracts the plist embedded in a provisioning profile. + + Args: + profile_path: string, path to the .mobileprovision file + + Returns: + Content of the plist embedded in the provisioning profile as a dictionary. 
+ """ + with tempfile.NamedTemporaryFile() as temp: + subprocess.check_call([ + 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) + return self._LoadPlistMaybeBinary(temp.name) + + def _MergePlist(self, merged_plist, plist): + """Merge |plist| into |merged_plist|.""" + for key, value in plist.items(): + if isinstance(value, dict): + merged_value = merged_plist.get(key, {}) + if isinstance(merged_value, dict): + self._MergePlist(merged_value, value) + merged_plist[key] = merged_value + else: + merged_plist[key] = value + else: + merged_plist[key] = value + + def _LoadPlistMaybeBinary(self, plist_path): + """Loads into a memory a plist possibly encoded in binary format. + + This is a wrapper around plistlib.readPlist that tries to convert the + plist to the XML format if it can't be parsed (assuming that it is in + the binary format). + + Args: + plist_path: string, path to a plist file, in XML or binary format + + Returns: + Content of the plist as a dictionary. + """ + try: + # First, try to read the file using plistlib that only supports XML, + # and if an exception is raised, convert a temporary copy to XML and + # load that copy. + return plistlib.readPlist(plist_path) + except: + pass + with tempfile.NamedTemporaryFile() as temp: + shutil.copy2(plist_path, temp.name) + subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) + return plistlib.readPlist(temp.name) + + def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): + """Constructs a dictionary of variable substitutions for Entitlements.plist. + + Args: + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + app_identifier_prefix: string, value for AppIdentifierPrefix + + Returns: + Dictionary of substitutions to apply when generating Entitlements.plist. + """ + return { + 'CFBundleIdentifier': bundle_identifier, + 'AppIdentifierPrefix': app_identifier_prefix, + } + + def _GetCFBundleIdentifier(self): + """Extracts CFBundleIdentifier value from Info.plist in the bundle. + + Returns: + Value of CFBundleIdentifier in the Info.plist located in the bundle. + """ + info_plist_path = os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['INFOPLIST_PATH']) + info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) + return info_plist_data['CFBundleIdentifier'] + + def _InstallEntitlements(self, entitlements, substitutions, overrides): + """Generates and install the ${BundleName}.xcent entitlements file. + + Expands variables "$(variable)" pattern in the source entitlements file, + add extra entitlements defined in the .mobileprovision file and the copy + the generated plist to "${BundlePath}.xcent". + + Args: + entitlements: string, optional, path to the Entitlements.plist template + to use, defaults to "${SDKROOT}/Entitlements.plist" + substitutions: dictionary, variable substitutions + overrides: dictionary, values to add to the entitlements + + Returns: + Path to the generated entitlements file. 
+ """ + source_path = entitlements + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['PRODUCT_NAME'] + '.xcent') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], + 'Entitlements.plist') + shutil.copy2(source_path, target_path) + data = self._LoadPlistMaybeBinary(target_path) + data = self._ExpandVariables(data, substitutions) + if overrides: + for key in overrides: + if key not in data: + data[key] = overrides[key] + plistlib.writePlist(data, target_path) + return target_path + + def _ExpandVariables(self, data, substitutions): + """Expands variables "$(variable)" in data. + + Args: + data: object, can be either string, list or dictionary + substitutions: dictionary, variable substitutions to perform + + Returns: + Copy of data where each references to "$(variable)" has been replaced + by the corresponding value found in substitutions, or left intact if + the key was not found. + """ + if isinstance(data, str): + for key, value in substitutions.items(): + data = data.replace('$(%s)' % key, value) + return data + if isinstance(data, list): + return [self._ExpandVariables(v, substitutions) for v in data] + if isinstance(data, dict): + return {k: self._ExpandVariables(data[k], substitutions) for k in data} + return data + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/node_modules/kerberos/build/kerberos.target.mk b/node_modules/kerberos/build/kerberos.target.mk new file mode 100644 index 000000000..5b22d7560 --- /dev/null +++ b/node_modules/kerberos/build/kerberos.target.mk @@ -0,0 +1,204 @@ +# This file is generated by gyp; do not edit. + +TOOLSET := target +TARGET := kerberos +DEFS_Debug := \ + '-DNODE_GYP_MODULE_NAME=kerberos' \ + '-DUSING_UV_SHARED=1' \ + '-DUSING_V8_SHARED=1' \ + '-DV8_DEPRECATION_WARNINGS=1' \ + '-DV8_DEPRECATION_WARNINGS' \ + '-DV8_IMMINENT_DEPRECATION_WARNINGS' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DOPENSSL_NO_PINSHARED' \ + '-DOPENSSL_THREADS' \ + '-D__MACOSX_CORE__' \ + '-DBUILDING_NODE_EXTENSION' \ + '-DDEBUG' \ + '-D_DEBUG' \ + '-DV8_ENABLE_CHECKS' + +# Flags passed to all source files. +CFLAGS_Debug := \ + -O0 \ + -gdwarf-2 \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Debug := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Debug := \ + -std=gnu++1y \ + -stdlib=libc++ \ + -fno-rtti \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Debug := + +# Flags passed to only ObjC++ files. 
+CFLAGS_OBJCC_Debug := + +INCS_Debug := \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/src \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/config \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/openssl/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/uv/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/zlib \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/v8/include + +DEFS_Release := \ + '-DNODE_GYP_MODULE_NAME=kerberos' \ + '-DUSING_UV_SHARED=1' \ + '-DUSING_V8_SHARED=1' \ + '-DV8_DEPRECATION_WARNINGS=1' \ + '-DV8_DEPRECATION_WARNINGS' \ + '-DV8_IMMINENT_DEPRECATION_WARNINGS' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DOPENSSL_NO_PINSHARED' \ + '-DOPENSSL_THREADS' \ + '-D__MACOSX_CORE__' \ + '-DBUILDING_NODE_EXTENSION' + +# Flags passed to all source files. +CFLAGS_Release := \ + -O3 \ + -gdwarf-2 \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Release := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Release := \ + -std=gnu++1y \ + -stdlib=libc++ \ + -fno-rtti \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Release := + +# Flags passed to only ObjC++ files. +CFLAGS_OBJCC_Release := + +INCS_Release := \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/src \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/config \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/openssl/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/uv/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/zlib \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/v8/include + +OBJS := \ + $(obj).target/$(TARGET)/lib/kerberos.o \ + $(obj).target/$(TARGET)/lib/worker.o \ + $(obj).target/$(TARGET)/lib/kerberosgss.o \ + $(obj).target/$(TARGET)/lib/base64.o \ + $(obj).target/$(TARGET)/lib/kerberos_context.o + +# Add to the list of files we specially track dependencies for. +all_deps += $(OBJS) + +# CFLAGS et al overrides must be target-local. +# See "Target-specific Variable Values" in the GNU Make manual. +$(OBJS): TOOLSET := $(TOOLSET) +$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) +$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) + +# Suffix rules, putting all outputs into $(obj). + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD + @$(call do_cmd,cc,1) + +# Try building from generated source, too. 
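The generated rules above compile lib/kerberos.cc, worker.cc, kerberosgss.c, base64.c and kerberos_context.cc into kerberos.node, linking it against the system Kerberos library (see the -lkrb5 link rules that follow). Because the package's install script, shown further down in this patch, deliberately swallows build failures ("|| (exit 0)"), consumers usually guard the require. A minimal sketch of that guarded-require pattern, illustrative only and not this package's actual index.js:

```js
// Illustrative only: a common pattern for consuming an optional native addon
// such as the kerberos.node binary this Makefile produces. The fallback
// behaviour below is a sketch, not the kerberos package's real entry point.
let kerberos = null;
try {
  // node-gyp places the compiled binding under build/Release (or build/Debug).
  kerberos = require('kerberos');
} catch (err) {
  // The install script tolerates build errors, so the addon may be absent;
  // callers should degrade gracefully instead of crashing at load time.
  console.warn('kerberos native addon unavailable:', err.message);
}

module.exports = kerberos;
```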
+ +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD + @$(call do_cmd,cc,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD + @$(call do_cmd,cc,1) + +# End of this set of suffix rules +### Rules for final target. +LDFLAGS_Debug := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -L$(builddir) \ + -stdlib=libc++ + +LIBTOOLFLAGS_Debug := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first + +LDFLAGS_Release := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -L$(builddir) \ + -stdlib=libc++ + +LIBTOOLFLAGS_Release := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first + +LIBS := \ + -lkrb5 + +$(builddir)/kerberos.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) +$(builddir)/kerberos.node: LIBS := $(LIBS) +$(builddir)/kerberos.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) +$(builddir)/kerberos.node: TOOLSET := $(TOOLSET) +$(builddir)/kerberos.node: $(OBJS) FORCE_DO_CMD + $(call do_cmd,solink_module) + +all_deps += $(builddir)/kerberos.node +# Add target alias +.PHONY: kerberos +kerberos: $(builddir)/kerberos.node + +# Short alias for building this executable. +.PHONY: kerberos.node +kerberos.node: $(builddir)/kerberos.node + +# Add executable to "all" target. +.PHONY: all +all: $(builddir)/kerberos.node + diff --git a/node_modules/mongodb/node_modules/kerberos/index.js b/node_modules/kerberos/index.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/index.js rename to node_modules/kerberos/index.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/auth_processes/mongodb.js b/node_modules/kerberos/lib/auth_processes/mongodb.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/auth_processes/mongodb.js rename to node_modules/kerberos/lib/auth_processes/mongodb.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/base64.c b/node_modules/kerberos/lib/base64.c similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/base64.c rename to node_modules/kerberos/lib/base64.c diff --git a/node_modules/mongodb/node_modules/kerberos/lib/base64.h b/node_modules/kerberos/lib/base64.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/base64.h rename to node_modules/kerberos/lib/base64.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberos.cc b/node_modules/kerberos/lib/kerberos.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberos.cc rename to node_modules/kerberos/lib/kerberos.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberos.h b/node_modules/kerberos/lib/kerberos.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberos.h rename to node_modules/kerberos/lib/kerberos.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberos.js b/node_modules/kerberos/lib/kerberos.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberos.js rename to node_modules/kerberos/lib/kerberos.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.cc b/node_modules/kerberos/lib/kerberos_context.cc similarity index 
100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.cc rename to node_modules/kerberos/lib/kerberos_context.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.h b/node_modules/kerberos/lib/kerberos_context.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.h rename to node_modules/kerberos/lib/kerberos_context.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.c b/node_modules/kerberos/lib/kerberosgss.c similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.c rename to node_modules/kerberos/lib/kerberosgss.c diff --git a/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.h b/node_modules/kerberos/lib/kerberosgss.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.h rename to node_modules/kerberos/lib/kerberosgss.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/sspi.js b/node_modules/kerberos/lib/sspi.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/sspi.js rename to node_modules/kerberos/lib/sspi.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.c b/node_modules/kerberos/lib/win32/base64.c similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/base64.c rename to node_modules/kerberos/lib/win32/base64.c diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.h b/node_modules/kerberos/lib/win32/base64.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/base64.h rename to node_modules/kerberos/lib/win32/base64.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.cc b/node_modules/kerberos/lib/win32/kerberos.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.cc rename to node_modules/kerberos/lib/win32/kerberos.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.h b/node_modules/kerberos/lib/win32/kerberos.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.h rename to node_modules/kerberos/lib/win32/kerberos.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.c b/node_modules/kerberos/lib/win32/kerberos_sspi.c similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.c rename to node_modules/kerberos/lib/win32/kerberos_sspi.c diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.h b/node_modules/kerberos/lib/win32/kerberos_sspi.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.h rename to node_modules/kerberos/lib/win32/kerberos_sspi.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.cc b/node_modules/kerberos/lib/win32/worker.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/worker.cc rename to node_modules/kerberos/lib/win32/worker.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.h b/node_modules/kerberos/lib/win32/worker.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/worker.h rename to node_modules/kerberos/lib/win32/worker.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc b/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc similarity index 
100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc rename to node_modules/kerberos/lib/win32/wrappers/security_buffer.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.h b/node_modules/kerberos/lib/win32/wrappers/security_buffer.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.h rename to node_modules/kerberos/lib/win32/wrappers/security_buffer.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.js b/node_modules/kerberos/lib/win32/wrappers/security_buffer.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.js rename to node_modules/kerberos/lib/win32/wrappers/security_buffer.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc b/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc rename to node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h b/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h rename to node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js b/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js rename to node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.cc b/node_modules/kerberos/lib/win32/wrappers/security_context.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.cc rename to node_modules/kerberos/lib/win32/wrappers/security_context.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.h b/node_modules/kerberos/lib/win32/wrappers/security_context.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.h rename to node_modules/kerberos/lib/win32/wrappers/security_context.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.js b/node_modules/kerberos/lib/win32/wrappers/security_context.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.js rename to node_modules/kerberos/lib/win32/wrappers/security_context.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc b/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc rename to node_modules/kerberos/lib/win32/wrappers/security_credentials.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.h b/node_modules/kerberos/lib/win32/wrappers/security_credentials.h 
similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.h rename to node_modules/kerberos/lib/win32/wrappers/security_credentials.h diff --git a/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.js b/node_modules/kerberos/lib/win32/wrappers/security_credentials.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.js rename to node_modules/kerberos/lib/win32/wrappers/security_credentials.js diff --git a/node_modules/mongodb/node_modules/kerberos/lib/worker.cc b/node_modules/kerberos/lib/worker.cc similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/worker.cc rename to node_modules/kerberos/lib/worker.cc diff --git a/node_modules/mongodb/node_modules/kerberos/lib/worker.h b/node_modules/kerberos/lib/worker.h similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/lib/worker.h rename to node_modules/kerberos/lib/worker.h diff --git a/node_modules/kerberos/package.json b/node_modules/kerberos/package.json new file mode 100644 index 000000000..d913e6787 --- /dev/null +++ b/node_modules/kerberos/package.json @@ -0,0 +1,57 @@ +{ + "_from": "kerberos@0.0.3", + "_id": "kerberos@0.0.3", + "_inBundle": false, + "_integrity": "sha1-QoXZKgdI2yeEBi9a3OyfWVbLgYo=", + "_location": "/kerberos", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "kerberos@0.0.3", + "name": "kerberos", + "escapedName": "kerberos", + "rawSpec": "0.0.3", + "saveSpec": null, + "fetchSpec": "0.0.3" + }, + "_requiredBy": [ + "/mongodb", + "/mquery/mongodb" + ], + "_resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz", + "_shasum": "4285d92a0748db2784062f5adcec9f5956cb818a", + "_spec": "kerberos@0.0.3", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongodb", + "author": { + "name": "Christian Amor Kvalheim" + }, + "bugs": { + "url": "https://github.com/christkv/kerberos/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Kerberos library for Node.js", + "devDependencies": { + "nodeunit": "latest" + }, + "gitHead": "bb01d4fe322e022999aca19da564e7d9db59a8ed", + "homepage": "https://github.com/christkv/kerberos#readme", + "keywords": [ + "kerberos", + "security", + "authentication" + ], + "license": "Apache 2.0", + "main": "index.js", + "name": "kerberos", + "repository": { + "type": "git", + "url": "git+https://github.com/christkv/kerberos.git" + }, + "scripts": { + "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", + "test": "nodeunit ./test" + }, + "version": "0.0.3" +} diff --git a/node_modules/mongodb/node_modules/kerberos/test/kerberos_tests.js b/node_modules/kerberos/test/kerberos_tests.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/test/kerberos_tests.js rename to node_modules/kerberos/test/kerberos_tests.js diff --git a/node_modules/mongodb/node_modules/kerberos/test/kerberos_win32_test.js b/node_modules/kerberos/test/kerberos_win32_test.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/test/kerberos_win32_test.js rename to node_modules/kerberos/test/kerberos_win32_test.js diff --git a/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js b/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js similarity index 100% rename from 
node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js rename to node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js diff --git a/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_tests.js b/node_modules/kerberos/test/win32/security_buffer_tests.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_tests.js rename to node_modules/kerberos/test/win32/security_buffer_tests.js diff --git a/node_modules/mongodb/node_modules/kerberos/test/win32/security_credentials_tests.js b/node_modules/kerberos/test/win32/security_credentials_tests.js similarity index 100% rename from node_modules/mongodb/node_modules/kerberos/test/win32/security_credentials_tests.js rename to node_modules/kerberos/test/win32/security_credentials_tests.js diff --git a/node_modules/express/node_modules/commander/node_modules/keypress/README.md b/node_modules/keypress/README.md similarity index 100% rename from node_modules/express/node_modules/commander/node_modules/keypress/README.md rename to node_modules/keypress/README.md diff --git a/node_modules/express/node_modules/commander/node_modules/keypress/index.js b/node_modules/keypress/index.js similarity index 100% rename from node_modules/express/node_modules/commander/node_modules/keypress/index.js rename to node_modules/keypress/index.js diff --git a/node_modules/keypress/package.json b/node_modules/keypress/package.json new file mode 100644 index 000000000..8e8a1e3fb --- /dev/null +++ b/node_modules/keypress/package.json @@ -0,0 +1,53 @@ +{ + "_from": "keypress@0.1.x", + "_id": "keypress@0.1.0", + "_inBundle": false, + "_integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=", + "_location": "/keypress", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "keypress@0.1.x", + "name": "keypress", + "escapedName": "keypress", + "rawSpec": "0.1.x", + "saveSpec": null, + "fetchSpec": "0.1.x" + }, + "_requiredBy": [ + "/commander" + ], + "_resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", + "_shasum": "4a3188d4291b66b4f65edb99f806aa9ae293592a", + "_spec": "keypress@0.1.x", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/commander", + "author": { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://tootallnate.net" + }, + "bugs": { + "url": "https://github.com/TooTallNate/keypress/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Make any Node ReadableStream emit \"keypress\" events", + "homepage": "https://github.com/TooTallNate/keypress#readme", + "keywords": [ + "keypress", + "readline", + "core" + ], + "license": "MIT", + "main": "index.js", + "name": "keypress", + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/keypress.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "0.1.0" +} diff --git a/node_modules/express/node_modules/commander/node_modules/keypress/test.js b/node_modules/keypress/test.js similarity index 100% rename from node_modules/express/node_modules/commander/node_modules/keypress/test.js rename to node_modules/keypress/test.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/.npmignore b/node_modules/lru-cache/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/.npmignore rename to node_modules/lru-cache/.npmignore diff --git 
a/node_modules/lru-cache/.travis.yml b/node_modules/lru-cache/.travis.yml new file mode 100644 index 000000000..4af02b3d1 --- /dev/null +++ b/node_modules/lru-cache/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - '0.8' + - '0.10' + - '0.12' + - 'iojs' +before_install: + - npm install -g npm@latest diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/CONTRIBUTORS b/node_modules/lru-cache/CONTRIBUTORS similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/CONTRIBUTORS rename to node_modules/lru-cache/CONTRIBUTORS diff --git a/node_modules/lru-cache/LICENSE b/node_modules/lru-cache/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ b/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md similarity index 70% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md rename to node_modules/lru-cache/README.md index 03ee0f985..c06814e04 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/README.md +++ b/node_modules/lru-cache/README.md @@ -24,6 +24,24 @@ If you put more stuff in it, then items will fall out. If you try to put an oversized thing in it, then it'll fall out right away. +## Keys should always be Strings or Numbers + +Note: this module will print warnings to `console.error` if you use a +key that is not a String or Number. Because items are stored in an +object, which coerces keys to a string, it won't go well for you if +you try to use a key that is not a unique string, it'll cause surprise +collisions. For example: + +```JavaScript +// Bad Example! Dont' do this! +var cache = LRU() +var a = {} +var b = {} +cache.set(a, 'this is a') +cache.set(b, 'this is b') +console.log(cache.get(a)) // prints: 'this is b' +``` + ## Options * `max` The maximum size of the cache, checked by applying the length @@ -36,7 +54,7 @@ away. * `length` Function that is used to calculate the length of stored items. If you're storing strings or buffers, then you probably want to do something like `function(n){return n.length}`. The default is - `function(n){return 1}`, which is fine if you want to store `n` + `function(n){return 1}`, which is fine if you want to store `max` like-sized things. * `dispose` Function that is called on items when they are dropped from the cache. This can be handy if you want to close file @@ -54,11 +72,12 @@ away. 
## API -* `set(key, value)` +* `set(key, value, maxAge)` * `get(key) => value` Both of these will update the "recently used"-ness of the key. - They do what you think. + They do what you think. `max` is optional and overrides the + cache `max` option if provided. * `peek(key)` @@ -95,3 +114,24 @@ away. * `values()` Return an array of the values in the cache. + +* `length()` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. + +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js b/node_modules/lru-cache/lib/lru-cache.js similarity index 70% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js rename to node_modules/lru-cache/lib/lru-cache.js index d1d138172..2bbe653be 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js +++ b/node_modules/lru-cache/lib/lru-cache.js @@ -13,6 +13,14 @@ function hOP (obj, key) { function naiveLength () { return 1 } +var didTypeWarning = false +function typeCheckKey(key) { + if (!didTypeWarning && typeof key !== 'string' && typeof key !== 'number') { + didTypeWarning = true + console.error(new TypeError("LRU: key must be a string or number. Almost certainly a bug! " + typeof key).stack) + } +} + function LRUCache (options) { if (!(this instanceof LRUCache)) return new LRUCache(options) @@ -86,11 +94,13 @@ Object.defineProperty(LRUCache.prototype, "itemCount", LRUCache.prototype.forEach = function (fn, thisp) { thisp = thisp || this - var i = 0; - for (var k = this._mru - 1; k >= 0 && i < this._itemCount; k--) if (this._lruList[k]) { + var i = 0 + var itemCount = this._itemCount + + for (var k = this._mru - 1; k >= 0 && i < itemCount; k--) if (this._lruList[k]) { i++ var hit = this._lruList[k] - if (this._maxAge && (Date.now() - hit.now > this._maxAge)) { + if (isStale(this, hit)) { del(this, hit) if (!this._allowStale) hit = undefined } @@ -135,29 +145,60 @@ LRUCache.prototype.reset = function () { this._itemCount = 0 } -// Provided for debugging/dev purposes only. No promises whatsoever that -// this API stays stable. LRUCache.prototype.dump = function () { - return this._cache + var arr = [] + var i = 0 + + for (var k = this._mru - 1; k >= 0 && i < this._itemCount; k--) if (this._lruList[k]) { + var hit = this._lruList[k] + if (!isStale(this, hit)) { + //Do not store staled hits + ++i + arr.push({ + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }); + } + } + //arr has the most read first + return arr } LRUCache.prototype.dumpLru = function () { return this._lruList } -LRUCache.prototype.set = function (key, value) { +LRUCache.prototype.set = function (key, value, maxAge) { + maxAge = maxAge || this._maxAge + typeCheckKey(key) + + var now = maxAge ? 
Date.now() : 0 + var len = this._lengthCalculator(value) + if (hOP(this._cache, key)) { + if (len > this._max) { + del(this, this._cache[key]) + return false + } // dispose of the old one before overwriting - if (this._dispose) this._dispose(key, this._cache[key].value) - if (this._maxAge) this._cache[key].now = Date.now() + if (this._dispose) + this._dispose(key, this._cache[key].value) + + this._cache[key].now = now + this._cache[key].maxAge = maxAge this._cache[key].value = value + this._length += (len - this._cache[key].length) + this._cache[key].length = len this.get(key) + + if (this._length > this._max) + trim(this) + return true } - var len = this._lengthCalculator(value) - var age = this._maxAge ? Date.now() : 0 - var hit = new Entry(key, value, this._mru++, len, age) + var hit = new Entry(key, value, this._mru++, len, now, maxAge) // oversized objects fall out of cache automatically. if (hit.length > this._max) { @@ -169,24 +210,29 @@ LRUCache.prototype.set = function (key, value) { this._lruList[hit.lu] = this._cache[key] = hit this._itemCount ++ - if (this._length > this._max) trim(this) + if (this._length > this._max) + trim(this) + return true } LRUCache.prototype.has = function (key) { + typeCheckKey(key) if (!hOP(this._cache, key)) return false var hit = this._cache[key] - if (this._maxAge && (Date.now() - hit.now > this._maxAge)) { + if (isStale(this, hit)) { return false } return true } LRUCache.prototype.get = function (key) { + typeCheckKey(key) return get(this, key, true) } LRUCache.prototype.peek = function (key) { + typeCheckKey(key) return get(this, key, false) } @@ -197,13 +243,36 @@ LRUCache.prototype.pop = function () { } LRUCache.prototype.del = function (key) { + typeCheckKey(key) del(this, this._cache[key]) } +LRUCache.prototype.load = function (arr) { + //reset the cache + this.reset(); + + var now = Date.now() + //A previous serialized cache has the most recent items first + for (var l = arr.length - 1; l >= 0; l-- ) { + var hit = arr[l] + typeCheckKey(hit.k) + var expiresAt = hit.e || 0 + if (expiresAt === 0) { + //the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + } else { + var maxAge = expiresAt - now + //dont add already expired items + if (maxAge > 0) this.set(hit.k, hit.v, maxAge) + } + } +} + function get (self, key, doUse) { + typeCheckKey(key) var hit = self._cache[key] if (hit) { - if (self._maxAge && (Date.now() - hit.now > self._maxAge)) { + if (isStale(self, hit)) { del(self, hit) if (!self._allowStale) hit = undefined } else { @@ -214,6 +283,18 @@ function get (self, key, doUse) { return hit } +function isStale(self, hit) { + if (!hit || (!hit.maxAge && !self._maxAge)) return false + var stale = false; + var diff = Date.now() - hit.now + if (hit.maxAge) { + stale = diff > hit.maxAge + } else { + stale = self._maxAge && (diff > self._maxAge) + } + return stale; +} + function use (self, hit) { shiftLU(self, hit) hit.lu = self._mru ++ @@ -241,12 +322,13 @@ function del (self, hit) { } // classy, since V8 prefers predictable objects. 
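Taken together, the rewritten set()/get()/dump()/load() above give each entry an optional maxAge that overrides the cache-wide default, make set() report success with a boolean, and turn the dump format into a serializable array of { k, v, e } records. A short usage sketch of what this patched lru-cache supports (variable names are illustrative):

```js
// Usage sketch for the lru-cache 2.7.x API added in this patch:
// per-item maxAge on set(), and dump()/load() for serialization.
var LRU = require('lru-cache')

var cache = new LRU({ max: 100, maxAge: 60 * 1000 }) // default TTL: 1 minute
cache.set('session', 'abc123')            // uses the cache-wide maxAge
cache.set('token', 'xyz789', 5 * 1000)    // this entry's own 5-second maxAge

// dump() returns the non-stale entries, most recently used first, as
// [{ k, v, e }] records; load() resets a cache and rebuilds it from them.
var snapshot = cache.dump()
var restored = new LRU({ max: 100, maxAge: 60 * 1000 })
restored.load(snapshot)

console.log(restored.get('session')) // 'abc123' (until it expires)
```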
-function Entry (key, value, lu, length, now) { +function Entry (key, value, lu, length, now, maxAge) { this.key = key this.value = value this.lu = lu this.length = length this.now = now + if (maxAge) this.maxAge = maxAge } })() diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json new file mode 100644 index 000000000..deee9404f --- /dev/null +++ b/node_modules/lru-cache/package.json @@ -0,0 +1,56 @@ +{ + "_from": "lru-cache@2", + "_id": "lru-cache@2.7.3", + "_inBundle": false, + "_integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=", + "_location": "/lru-cache", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "lru-cache@2", + "name": "lru-cache", + "escapedName": "lru-cache", + "rawSpec": "2", + "saveSpec": null, + "fetchSpec": "2" + }, + "_requiredBy": [ + "/minimatch" + ], + "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "_shasum": "6d4524e8b955f95d4f5b58851ce21dd72fb4e952", + "_spec": "lru-cache@2", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/minimatch", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "bugs": { + "url": "https://github.com/isaacs/node-lru-cache/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "A cache object that deletes the least-recently-used items.", + "devDependencies": { + "tap": "^1.2.0", + "weak": "" + }, + "homepage": "https://github.com/isaacs/node-lru-cache#readme", + "keywords": [ + "mru", + "lru", + "cache" + ], + "license": "ISC", + "main": "lib/lru-cache.js", + "name": "lru-cache", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-lru-cache.git" + }, + "scripts": { + "test": "tap test --gc" + }, + "version": "2.7.3" +} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/basic.js b/node_modules/lru-cache/test/basic.js similarity index 86% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/basic.js rename to node_modules/lru-cache/test/basic.js index f72697c46..b47225f10 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/basic.js +++ b/node_modules/lru-cache/test/basic.js @@ -93,31 +93,6 @@ test("reset", function (t) { }) -// Note: `.dump()` is a debugging tool only. No guarantees are made -// about the format/layout of the response. 
-test("dump", function (t) { - var cache = new LRU(10) - var d = cache.dump(); - t.equal(Object.keys(d).length, 0, "nothing in dump for empty cache") - cache.set("a", "A") - var d = cache.dump() // { a: { key: "a", value: "A", lu: 0 } } - t.ok(d.a) - t.equal(d.a.key, "a") - t.equal(d.a.value, "A") - t.equal(d.a.lu, 0) - - cache.set("b", "B") - cache.get("b") - d = cache.dump() - t.ok(d.b) - t.equal(d.b.key, "b") - t.equal(d.b.value, "B") - t.equal(d.b.lu, 2) - - t.end() -}) - - test("basic with weighed length", function (t) { var cache = new LRU({ max: 100, @@ -182,6 +157,32 @@ test("lru recently gotten with weighed length", function (t) { t.end() }) +test("lru recently updated with weighed length", function (t) { + var cache = new LRU({ + max: 8, + length: function (item) { return item.length } + }) + cache.set("a", "A") + cache.set("b", "BB") + cache.set("c", "CCC") + t.equal(cache.length, 6) //CCC BB A + cache.set("a", "+A") + t.equal(cache.length, 7) //+A CCC BB + cache.set("b", "++BB") + t.equal(cache.length, 6) //++BB +A + t.equal(cache.get("c"), undefined) + + cache.set("c", "oversized") + t.equal(cache.length, 6) //++BB +A + t.equal(cache.get("c"), undefined) + + cache.set("a", "oversized") + t.equal(cache.length, 4) //++BB + t.equal(cache.get("a"), undefined) + t.equal(cache.get("b"), "++BB") + t.end() +}) + test("set returns proper booleans", function(t) { var cache = new LRU({ max: 5, @@ -215,7 +216,7 @@ test("drop the old items", function(t) { cache.set("c", "C") // timed out t.notOk(cache.get("a")) - }, 60) + }, 60 + 25) setTimeout(function () { t.notOk(cache.get("b")) @@ -228,6 +229,32 @@ test("drop the old items", function(t) { }, 155) }) +test("individual item can have it's own maxAge", function(t) { + var cache = new LRU({ + max: 5, + maxAge: 50 + }) + + cache.set("a", "A", 20) + setTimeout(function () { + t.notOk(cache.get("a")) + t.end() + }, 25) +}) + +test("individual item can have it's own maxAge > cache's", function(t) { + var cache = new LRU({ + max: 5, + maxAge: 20 + }) + + cache.set("a", "A", 50) + setTimeout(function () { + t.equal(cache.get("a"), "A") + t.end() + }, 25) +}) + test("disposal function", function(t) { var disposed = false var cache = new LRU({ diff --git a/node_modules/lru-cache/test/foreach.js b/node_modules/lru-cache/test/foreach.js new file mode 100644 index 000000000..4190417cb --- /dev/null +++ b/node_modules/lru-cache/test/foreach.js @@ -0,0 +1,120 @@ +var test = require('tap').test +var LRU = require('../') + +test('forEach', function (t) { + var l = new LRU(5) + for (var i = 0; i < 10; i ++) { + l.set(i.toString(), i.toString(2)) + } + + var i = 9 + l.forEach(function (val, key, cache) { + t.equal(cache, l) + t.equal(key, i.toString()) + t.equal(val, i.toString(2)) + i -= 1 + }) + + // get in order of most recently used + l.get(6) + l.get(8) + + var order = [ 8, 6, 9, 7, 5 ] + var i = 0 + + l.forEach(function (val, key, cache) { + var j = order[i ++] + t.equal(cache, l) + t.equal(key, j.toString()) + t.equal(val, j.toString(2)) + }) + t.equal(i, order.length); + + t.end() +}) + +test('keys() and values()', function (t) { + var l = new LRU(5) + for (var i = 0; i < 10; i ++) { + l.set(i.toString(), i.toString(2)) + } + + t.similar(l.keys(), ['9', '8', '7', '6', '5']) + t.similar(l.values(), ['1001', '1000', '111', '110', '101']) + + // get in order of most recently used + l.get(6) + l.get(8) + + t.similar(l.keys(), ['8', '6', '9', '7', '5']) + t.similar(l.values(), ['1000', '110', '1001', '111', '101']) + + t.end() +}) + +test('all entries 
are iterated over', function(t) { + var l = new LRU(5) + for (var i = 0; i < 10; i ++) { + l.set(i.toString(), i.toString(2)) + } + + var i = 0 + l.forEach(function (val, key, cache) { + if (i > 0) { + cache.del(key) + } + i += 1 + }) + + t.equal(i, 5) + t.equal(l.keys().length, 1) + + t.end() +}) + +test('all stale entries are removed', function(t) { + var l = new LRU({ max: 5, maxAge: -5, stale: true }) + for (var i = 0; i < 10; i ++) { + l.set(i.toString(), i.toString(2)) + } + + var i = 0 + l.forEach(function () { + i += 1 + }) + + t.equal(i, 5) + t.equal(l.keys().length, 0) + + t.end() +}) + +test('expires', function (t) { + var l = new LRU({ + max: 10, + maxAge: 50 + }) + for (var i = 0; i < 10; i++) { + l.set(i.toString(), i.toString(2), ((i % 2) ? 25 : undefined)) + } + + var i = 0 + var order = [ 8, 6, 4, 2, 0 ] + setTimeout(function () { + l.forEach(function (val, key, cache) { + var j = order[i++] + t.equal(cache, l) + t.equal(key, j.toString()) + t.equal(val, j.toString(2)) + }) + t.equal(i, order.length); + + setTimeout(function () { + var count = 0; + l.forEach(function (val, key, cache) { count++; }) + t.equal(0, count); + t.end() + }, 25) + + }, 26) +}) diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/memory-leak.js b/node_modules/lru-cache/test/memory-leak.js similarity index 99% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/memory-leak.js rename to node_modules/lru-cache/test/memory-leak.js index 7af45b022..b5912f6f1 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/test/memory-leak.js +++ b/node_modules/lru-cache/test/memory-leak.js @@ -1,5 +1,6 @@ #!/usr/bin/env node --expose_gc + var weak = require('weak'); var test = require('tap').test var LRU = require('../') diff --git a/node_modules/lru-cache/test/serialize.js b/node_modules/lru-cache/test/serialize.js new file mode 100644 index 000000000..1094194a0 --- /dev/null +++ b/node_modules/lru-cache/test/serialize.js @@ -0,0 +1,216 @@ +var test = require('tap').test +var LRU = require('../') + +test('dump', function (t) { + var cache = new LRU() + + t.equal(cache.dump().length, 0, "nothing in dump for empty cache") + + cache.set("a", "A") + cache.set("b", "B") + t.deepEqual(cache.dump(), [ + { k: "b", v: "B", e: 0 }, + { k: "a", v: "A", e: 0 } + ]) + + cache.set("a", "A"); + t.deepEqual(cache.dump(), [ + { k: "a", v: "A", e: 0 }, + { k: "b", v: "B", e: 0 } + ]) + + cache.get("b"); + t.deepEqual(cache.dump(), [ + { k: "b", v: "B", e: 0 }, + { k: "a", v: "A", e: 0 } + ]) + + cache.del("a"); + t.deepEqual(cache.dump(), [ + { k: "b", v: "B", e: 0 } + ]) + + t.end() +}) + +test("do not dump stale items", function(t) { + var cache = new LRU({ + max: 5, + maxAge: 50 + }) + + //expires at 50 + cache.set("a", "A") + + setTimeout(function () { + //expires at 75 + cache.set("b", "B") + var s = cache.dump() + t.equal(s.length, 2) + t.equal(s[0].k, "b") + t.equal(s[1].k, "a") + }, 25) + + setTimeout(function () { + //expires at 110 + cache.set("c", "C") + var s = cache.dump() + t.equal(s.length, 2) + t.equal(s[0].k, "c") + t.equal(s[1].k, "b") + }, 60) + + setTimeout(function () { + //expires at 130 + cache.set("d", "D", 40) + var s = cache.dump() + t.equal(s.length, 2) + t.equal(s[0].k, "d") + t.equal(s[1].k, "c") + }, 90) + + setTimeout(function () { + var s = cache.dump() + t.equal(s.length, 1) + t.equal(s[0].k, "d") + }, 120) + + 
setTimeout(function () { + var s = cache.dump() + t.deepEqual(s, []) + t.end() + }, 155) +}) + +test("load basic cache", function(t) { + var cache = new LRU(), + copy = new LRU() + + cache.set("a", "A") + cache.set("b", "B") + + copy.load(cache.dump()) + t.deepEquals(cache.dump(), copy.dump()) + + t.end() +}) + + +test("load staled cache", function(t) { + var cache = new LRU({maxAge: 50}), + copy = new LRU({maxAge: 50}), + arr + + //expires at 50 + cache.set("a", "A") + setTimeout(function () { + //expires at 80 + cache.set("b", "B") + arr = cache.dump() + t.equal(arr.length, 2) + }, 30) + + setTimeout(function () { + copy.load(arr) + t.equal(copy.get("a"), undefined) + t.equal(copy.get("b"), "B") + }, 60) + + setTimeout(function () { + t.equal(copy.get("b"), undefined) + t.end() + }, 90) +}) + +test("load to other size cache", function(t) { + var cache = new LRU({max: 2}), + copy = new LRU({max: 1}) + + cache.set("a", "A") + cache.set("b", "B") + + copy.load(cache.dump()) + t.equal(copy.get("a"), undefined) + t.equal(copy.get("b"), "B") + + //update the last read from original cache + cache.get("a") + copy.load(cache.dump()) + t.equal(copy.get("a"), "A") + t.equal(copy.get("b"), undefined) + + t.end() +}) + + +test("load to other age cache", function(t) { + var cache = new LRU({maxAge: 50}), + aged = new LRU({maxAge: 100}), + simple = new LRU(), + arr, + expired + + //created at 0 + //a would be valid till 0 + 50 + cache.set("a", "A") + setTimeout(function () { + //created at 20 + //b would be valid till 20 + 50 + cache.set("b", "B") + //b would be valid till 20 + 70 + cache.set("c", "C", 70) + arr = cache.dump() + t.equal(arr.length, 3) + }, 20) + + setTimeout(function () { + t.equal(cache.get("a"), undefined) + t.equal(cache.get("b"), "B") + t.equal(cache.get("c"), "C") + + aged.load(arr) + t.equal(aged.get("a"), undefined) + t.equal(aged.get("b"), "B") + t.equal(aged.get("c"), "C") + + simple.load(arr) + t.equal(simple.get("a"), undefined) + t.equal(simple.get("b"), "B") + t.equal(simple.get("c"), "C") + }, 60) + + setTimeout(function () { + t.equal(cache.get("a"), undefined) + t.equal(cache.get("b"), undefined) + t.equal(cache.get("c"), "C") + + aged.load(arr) + t.equal(aged.get("a"), undefined) + t.equal(aged.get("b"), undefined) + t.equal(aged.get("c"), "C") + + simple.load(arr) + t.equal(simple.get("a"), undefined) + t.equal(simple.get("b"), undefined) + t.equal(simple.get("c"), "C") + }, 80) + + setTimeout(function () { + t.equal(cache.get("a"), undefined) + t.equal(cache.get("b"), undefined) + t.equal(cache.get("c"), undefined) + + aged.load(arr) + t.equal(aged.get("a"), undefined) + t.equal(aged.get("b"), undefined) + t.equal(aged.get("c"), undefined) + + simple.load(arr) + t.equal(simple.get("a"), undefined) + t.equal(simple.get("b"), undefined) + t.equal(simple.get("c"), undefined) + t.end() + }, 100) + +}) + diff --git a/node_modules/express/node_modules/merge-descriptors/.npmignore b/node_modules/merge-descriptors/.npmignore similarity index 100% rename from node_modules/express/node_modules/merge-descriptors/.npmignore rename to node_modules/merge-descriptors/.npmignore diff --git a/node_modules/express/node_modules/merge-descriptors/README.md b/node_modules/merge-descriptors/README.md similarity index 100% rename from node_modules/express/node_modules/merge-descriptors/README.md rename to node_modules/merge-descriptors/README.md diff --git a/node_modules/express/node_modules/merge-descriptors/component.json b/node_modules/merge-descriptors/component.json similarity index 
100% rename from node_modules/express/node_modules/merge-descriptors/component.json rename to node_modules/merge-descriptors/component.json diff --git a/node_modules/express/node_modules/merge-descriptors/index.js b/node_modules/merge-descriptors/index.js similarity index 100% rename from node_modules/express/node_modules/merge-descriptors/index.js rename to node_modules/merge-descriptors/index.js diff --git a/node_modules/merge-descriptors/package.json b/node_modules/merge-descriptors/package.json new file mode 100644 index 000000000..12e51289f --- /dev/null +++ b/node_modules/merge-descriptors/package.json @@ -0,0 +1,47 @@ +{ + "_from": "merge-descriptors@0.0.1", + "_id": "merge-descriptors@0.0.1", + "_inBundle": false, + "_integrity": "sha1-L/CYDJJM+B0LXR+2ARd8uLtWwNA=", + "_location": "/merge-descriptors", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "merge-descriptors@0.0.1", + "name": "merge-descriptors", + "escapedName": "merge-descriptors", + "rawSpec": "0.0.1", + "saveSpec": null, + "fetchSpec": "0.0.1" + }, + "_requiredBy": [ + "/express" + ], + "_resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz", + "_shasum": "2ff0980c924cf81d0b5d1fb601177cb8bb56c0d0", + "_spec": "merge-descriptors@0.0.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + }, + "bugs": { + "url": "https://github.com/jonathanong/merge-descriptors/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Merge objects using descriptors", + "homepage": "https://github.com/jonathanong/merge-descriptors#readme", + "license": "MIT", + "name": "merge-descriptors", + "repository": { + "type": "git", + "url": "git+https://github.com/jonathanong/merge-descriptors.git" + }, + "scripts": { + "test": "make test;" + }, + "version": "0.0.1" +} diff --git a/node_modules/express/node_modules/methods/History.md b/node_modules/methods/History.md similarity index 100% rename from node_modules/express/node_modules/methods/History.md rename to node_modules/methods/History.md diff --git a/node_modules/express/node_modules/methods/Readme.md b/node_modules/methods/Readme.md similarity index 100% rename from node_modules/express/node_modules/methods/Readme.md rename to node_modules/methods/Readme.md diff --git a/node_modules/express/node_modules/methods/index.js b/node_modules/methods/index.js similarity index 100% rename from node_modules/express/node_modules/methods/index.js rename to node_modules/methods/index.js diff --git a/node_modules/methods/package.json b/node_modules/methods/package.json new file mode 100644 index 000000000..30b882e23 --- /dev/null +++ b/node_modules/methods/package.json @@ -0,0 +1,51 @@ +{ + "_from": "methods@0.1.0", + "_id": "methods@0.1.0", + "_inBundle": false, + "_integrity": "sha1-M11Cnu/SG3us8unJIqjSvRSjDk8=", + "_location": "/methods", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "methods@0.1.0", + "name": "methods", + "escapedName": "methods", + "rawSpec": "0.1.0", + "saveSpec": null, + "fetchSpec": "0.1.0" + }, + "_requiredBy": [ + "/connect", + "/express" + ], + "_resolved": "https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", + "_shasum": "335d429eefd21b7bacf2e9c922a8d2bd14a30e4f", + "_spec": "methods@0.1.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + 
"author": { + "name": "TJ Holowaychuk" + }, + "bugs": { + "url": "https://github.com/visionmedia/node-methods/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "HTTP methods that node supports", + "homepage": "https://github.com/visionmedia/node-methods#readme", + "keywords": [ + "http", + "methods" + ], + "license": "MIT", + "main": "index.js", + "name": "methods", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/node-methods.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "0.1.0" +} diff --git a/node_modules/express/node_modules/send/node_modules/mime/LICENSE b/node_modules/mime/LICENSE similarity index 100% rename from node_modules/express/node_modules/send/node_modules/mime/LICENSE rename to node_modules/mime/LICENSE diff --git a/node_modules/express/node_modules/send/node_modules/mime/README.md b/node_modules/mime/README.md similarity index 100% rename from node_modules/express/node_modules/send/node_modules/mime/README.md rename to node_modules/mime/README.md diff --git a/node_modules/express/node_modules/send/node_modules/mime/mime.js b/node_modules/mime/mime.js similarity index 100% rename from node_modules/express/node_modules/send/node_modules/mime/mime.js rename to node_modules/mime/mime.js diff --git a/node_modules/mime/package.json b/node_modules/mime/package.json new file mode 100644 index 000000000..155d15822 --- /dev/null +++ b/node_modules/mime/package.json @@ -0,0 +1,57 @@ +{ + "_from": "mime@~1.2.9", + "_id": "mime@1.2.11", + "_inBundle": false, + "_integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=", + "_location": "/mime", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "mime@~1.2.9", + "name": "mime", + "escapedName": "mime", + "rawSpec": "~1.2.9", + "saveSpec": null, + "fetchSpec": "~1.2.9" + }, + "_requiredBy": [ + "/send" + ], + "_resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "_shasum": "58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10", + "_spec": "mime@~1.2.9", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/send", + "author": { + "name": "Robert Kieffer", + "email": "robert@broofa.com", + "url": "http://github.com/broofa" + }, + "bugs": { + "url": "https://github.com/broofa/node-mime/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Benjamin Thomas", + "email": "benjamin@benjaminthomas.org", + "url": "http://github.com/bentomas" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "A comprehensive library for mime-type mapping", + "devDependencies": {}, + "homepage": "https://github.com/broofa/node-mime#readme", + "keywords": [ + "util", + "mime" + ], + "main": "mime.js", + "name": "mime", + "repository": { + "url": "git+https://github.com/broofa/node-mime.git", + "type": "git" + }, + "version": "1.2.11" +} diff --git a/node_modules/express/node_modules/send/node_modules/mime/test.js b/node_modules/mime/test.js similarity index 100% rename from node_modules/express/node_modules/send/node_modules/mime/test.js rename to node_modules/mime/test.js diff --git a/node_modules/express/node_modules/send/node_modules/mime/types/mime.types b/node_modules/mime/types/mime.types similarity index 100% rename from node_modules/express/node_modules/send/node_modules/mime/types/mime.types rename to node_modules/mime/types/mime.types diff --git a/node_modules/express/node_modules/send/node_modules/mime/types/node.types 
b/node_modules/mime/types/node.types similarity index 100% rename from node_modules/express/node_modules/send/node_modules/mime/types/node.types rename to node_modules/mime/types/node.types diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/.npmignore b/node_modules/minimatch/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/.npmignore rename to node_modules/minimatch/.npmignore diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/LICENSE rename to node_modules/minimatch/LICENSE diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/README.md b/node_modules/minimatch/README.md similarity index 98% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/README.md rename to node_modules/minimatch/README.md index 978268e27..5b3967ea9 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/README.md +++ b/node_modules/minimatch/README.md @@ -157,8 +157,8 @@ Perform a case-insensitive match. ### nonull When a match is not found by `minimatch.match`, return a list containing -the pattern itself. When set, an empty list is returned if there are -no matches. +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. ### matchBase diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js similarity index 98% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/minimatch.js rename to node_modules/minimatch/minimatch.js index c633f89fa..453967894 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/minimatch.js +++ b/node_modules/minimatch/minimatch.js @@ -813,11 +813,12 @@ function makeRe () { } minimatch.match = function (list, pattern, options) { + options = options || {} var mm = new Minimatch(pattern, options) list = list.filter(function (f) { return mm.match(f) }) - if (options.nonull && !list.length) { + if (mm.options.nonull && !list.length) { list.push(pattern) } return list @@ -853,12 +854,17 @@ function match (f, partial) { var set = this.set this.debug(this.pattern, "set", set) - var splitFile = path.basename(f.join("/")).split("/") + // Find the basename of the path by looking for the last non-empty segment + var filename; + for (var i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } for (var i = 0, l = set.length; i < l; i ++) { var pattern = set[i], file = f if (options.matchBase && pattern.length === 1) { - file = splitFile + file = [filename] } var hit = this.matchOne(file, pattern, partial) if (hit) { @@ -975,7 +981,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) { } // no match was found. // However, in partial mode, we can't say this is necessarily over. 
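The README and minimatch.match changes above tighten two behaviours: nonull only pads the result with the pattern itself when the option is explicitly set, and match() now defaults the options object instead of throwing when it is omitted (the matchBase path also resolves the basename of the file more robustly). A short usage sketch against the minimatch 0.3.x API shown in this diff (file names are illustrative):

```js
// Usage sketch for the nonull and matchBase behaviour described above.
var minimatch = require('minimatch')

var files = ['lib/kerberos.js', 'lib/worker.cc']

// No *.md files: with nonull set, the pattern itself is returned...
console.log(minimatch.match(files, '*.md', { nonull: true }))   // [ '*.md' ]

// ...and without it (or with no options object at all, which the patched
// minimatch.match now tolerates) you simply get an empty list.
console.log(minimatch.match(files, '*.md'))                      // []

// matchBase compares slash-free patterns against each file's basename.
console.log(minimatch.match(files, '*.cc', { matchBase: true })) // [ 'lib/worker.cc' ]
```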
- // If there's more *pattern* left, then + // If there's more *pattern* left, then if (partial) { // ran out of file this.debug("\n>>> no match, partial?", file, fr, pattern, pr) diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 000000000..718cd8c77 --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,61 @@ +{ + "_from": "minimatch@0.3", + "_id": "minimatch@0.3.0", + "_inBundle": false, + "_integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", + "_location": "/minimatch", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "minimatch@0.3", + "name": "minimatch", + "escapedName": "minimatch", + "rawSpec": "0.3", + "saveSpec": null, + "fetchSpec": "0.3" + }, + "_requiredBy": [ + "/glob" + ], + "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "_shasum": "275d8edaac4f1bb3326472089e7949c8394699dd", + "_spec": "minimatch@0.3", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/glob", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me" + }, + "bugs": { + "url": "https://github.com/isaacs/minimatch/issues" + }, + "bundleDependencies": false, + "dependencies": { + "lru-cache": "2", + "sigmund": "~1.0.0" + }, + "deprecated": "Please update to minimatch 3.0.2 or higher to avoid a RegExp DoS issue", + "description": "a glob matcher in javascript", + "devDependencies": { + "tap": "" + }, + "engines": { + "node": "*" + }, + "homepage": "https://github.com/isaacs/minimatch#readme", + "license": { + "type": "MIT", + "url": "http://github.com/isaacs/minimatch/raw/master/LICENSE" + }, + "main": "minimatch.js", + "name": "minimatch", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "0.3.0" +} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/basic.js b/node_modules/minimatch/test/basic.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/basic.js rename to node_modules/minimatch/test/basic.js diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/brace-expand.js b/node_modules/minimatch/test/brace-expand.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/brace-expand.js rename to node_modules/minimatch/test/brace-expand.js diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/caching.js b/node_modules/minimatch/test/caching.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/caching.js rename to node_modules/minimatch/test/caching.js diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/defaults.js b/node_modules/minimatch/test/defaults.js similarity index 99% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/defaults.js rename to node_modules/minimatch/test/defaults.js index 25f1f601c..75e05712d 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/defaults.js +++ b/node_modules/minimatch/test/defaults.js @@ -237,7 +237,7 @@ tap.test("basic tests", function (t) { var pattern = c[0] , expect = c[1].sort(alpha) - , options = c[2] || {} + , options = c[2] , f = c[3] || files , tapOpts = 
diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/extglob-ending-with-state-char.js b/node_modules/minimatch/test/extglob-ending-with-state-char.js
similarity index 100%
rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/test/extglob-ending-with-state-char.js
rename to node_modules/minimatch/test/extglob-ending-with-state-char.js
diff --git a/node_modules/express/node_modules/mkdirp/.npmignore b/node_modules/mkdirp/.npmignore
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/.npmignore
rename to node_modules/mkdirp/.npmignore
diff --git a/node_modules/express/node_modules/mkdirp/.travis.yml b/node_modules/mkdirp/.travis.yml
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/.travis.yml
rename to node_modules/mkdirp/.travis.yml
diff --git a/node_modules/express/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/LICENSE
rename to node_modules/mkdirp/LICENSE
diff --git a/node_modules/express/node_modules/mkdirp/examples/pow.js b/node_modules/mkdirp/examples/pow.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/examples/pow.js
rename to node_modules/mkdirp/examples/pow.js
diff --git a/node_modules/express/node_modules/mkdirp/index.js b/node_modules/mkdirp/index.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/index.js
rename to node_modules/mkdirp/index.js
diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json
new file mode 100644
index 000000000..6dfa64f75
--- /dev/null
+++ b/node_modules/mkdirp/package.json
@@ -0,0 +1,55 @@
+{
+  "_from": "mkdirp@0.3.5",
+  "_id": "mkdirp@0.3.5",
+  "_inBundle": false,
+  "_integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=",
+  "_location": "/mkdirp",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "version",
+    "registry": true,
+    "raw": "mkdirp@0.3.5",
+    "name": "mkdirp",
+    "escapedName": "mkdirp",
+    "rawSpec": "0.3.5",
+    "saveSpec": null,
+    "fetchSpec": "0.3.5"
+  },
+  "_requiredBy": [
+    "/express"
+  ],
+  "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz",
+  "_shasum": "de3e5f8961c88c787ee1368df849ac4413eca8d7",
+  "_spec": "mkdirp@0.3.5",
+  "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express",
+  "author": {
+    "name": "James Halliday",
+    "email": "mail@substack.net",
+    "url": "http://substack.net"
+  },
+  "bugs": {
+    "url": "https://github.com/substack/node-mkdirp/issues"
+  },
+  "bundleDependencies": false,
+  "deprecated": "Legacy versions of mkdirp are no longer supported. Please update to mkdirp 1.x. (Note that the API surface has changed to use Promises in 1.x.)",
+  "description": "Recursively mkdir, like `mkdir -p`",
+  "devDependencies": {
+    "tap": "~0.4.0"
+  },
+  "homepage": "https://github.com/substack/node-mkdirp#readme",
+  "keywords": [
+    "mkdir",
+    "directory"
+  ],
+  "license": "MIT",
+  "main": "./index",
+  "name": "mkdirp",
+  "repository": {
+    "type": "git",
+    "url": "git+ssh://git@github.com/substack/node-mkdirp.git"
+  },
+  "scripts": {
+    "test": "tap test/*.js"
+  },
+  "version": "0.3.5"
+}
diff --git a/node_modules/express/node_modules/mkdirp/readme.markdown b/node_modules/mkdirp/readme.markdown
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/readme.markdown
rename to node_modules/mkdirp/readme.markdown
diff --git a/node_modules/express/node_modules/mkdirp/test/chmod.js b/node_modules/mkdirp/test/chmod.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/chmod.js
rename to node_modules/mkdirp/test/chmod.js
diff --git a/node_modules/express/node_modules/mkdirp/test/clobber.js b/node_modules/mkdirp/test/clobber.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/clobber.js
rename to node_modules/mkdirp/test/clobber.js
diff --git a/node_modules/express/node_modules/mkdirp/test/mkdirp.js b/node_modules/mkdirp/test/mkdirp.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/mkdirp.js
rename to node_modules/mkdirp/test/mkdirp.js
diff --git a/node_modules/express/node_modules/mkdirp/test/perm.js b/node_modules/mkdirp/test/perm.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/perm.js
rename to node_modules/mkdirp/test/perm.js
diff --git a/node_modules/express/node_modules/mkdirp/test/perm_sync.js b/node_modules/mkdirp/test/perm_sync.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/perm_sync.js
rename to node_modules/mkdirp/test/perm_sync.js
diff --git a/node_modules/express/node_modules/mkdirp/test/race.js b/node_modules/mkdirp/test/race.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/race.js
rename to node_modules/mkdirp/test/race.js
diff --git a/node_modules/express/node_modules/mkdirp/test/rel.js b/node_modules/mkdirp/test/rel.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/rel.js
rename to node_modules/mkdirp/test/rel.js
diff --git a/node_modules/express/node_modules/mkdirp/test/return.js b/node_modules/mkdirp/test/return.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/return.js
rename to node_modules/mkdirp/test/return.js
diff --git a/node_modules/express/node_modules/mkdirp/test/return_sync.js b/node_modules/mkdirp/test/return_sync.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/return_sync.js
rename to node_modules/mkdirp/test/return_sync.js
diff --git a/node_modules/express/node_modules/mkdirp/test/root.js b/node_modules/mkdirp/test/root.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/root.js
rename to node_modules/mkdirp/test/root.js
diff --git a/node_modules/express/node_modules/mkdirp/test/sync.js b/node_modules/mkdirp/test/sync.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/sync.js
rename to node_modules/mkdirp/test/sync.js
diff --git a/node_modules/express/node_modules/mkdirp/test/umask.js b/node_modules/mkdirp/test/umask.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/umask.js
rename to node_modules/mkdirp/test/umask.js
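mkdirp 0.3.5, whose metadata is spelled out above, is the legacy callback/sync flavor that the "deprecated" field contrasts with the Promise-based 1.x API. A minimal sketch of that older interface, assuming this 0.3.x install is what Express resolves and with invented target paths:

    // Hedged sketch: recursive directory creation with the legacy mkdirp 0.3 API
    var mkdirp = require('mkdirp');

    mkdirp('/tmp/example/a/b/c', function (err) {   // asynchronous, callback-style
      if (err) console.error(err);
      else console.log('directory tree created');
    });

    mkdirp.sync('/tmp/example/x/y');                // synchronous variant

In 1.x the same call returns a Promise instead of taking a callback, which is exactly the API change the deprecation note warns about.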
diff --git a/node_modules/express/node_modules/mkdirp/test/umask_sync.js b/node_modules/mkdirp/test/umask_sync.js
similarity index 100%
rename from node_modules/express/node_modules/mkdirp/test/umask_sync.js
rename to node_modules/mkdirp/test/umask_sync.js
diff --git a/node_modules/mongodb/index.js b/node_modules/mongodb/index.js
old mode 100644
new mode 100755
diff --git a/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d b/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d
deleted file mode 100644
index 866c155bc..000000000
--- a/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d
+++ /dev/null
@@ -1 +0,0 @@
-cmd_Release/bson.node := rm -rf "Release/bson.node" && cp -af "Release/obj.target/bson.node" "Release/bson.node"
diff --git a/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d b/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d
deleted file mode 100644
index a7317de71..000000000
--- a/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d
+++ /dev/null
@@ -1 +0,0 @@
-cmd_Release/obj.target/bson.node := flock ./Release/linker.lock g++ -shared -pthread -rdynamic -m32 -Wl,-soname=bson.node -o Release/obj.target/bson.node -Wl,--start-group Release/obj.target/bson/ext/bson.o -Wl,--end-group
diff --git a/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d b/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d
deleted file mode 100644
index 8859b053b..000000000
--- a/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d
+++ /dev/null
@@ -1,29 +0,0 @@
-cmd_Release/obj.target/bson/ext/bson.o := g++ '-D_LARGEFILE_SOURCE' '-D_FILE_OFFSET_BITS=64' '-DBUILDING_NODE_EXTENSION' -I/home/vagrant/.node-gyp/0.10.24/src -I/home/vagrant/.node-gyp/0.10.24/deps/uv/include -I/home/vagrant/.node-gyp/0.10.24/deps/v8/include -Wall -Wextra -Wno-unused-parameter -pthread -m32 -O2 -fno-strict-aliasing -fno-tree-vrp -fno-omit-frame-pointer -fno-rtti -MMD -MF ./Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw -c -o Release/obj.target/bson/ext/bson.o ../ext/bson.cc
-Release/obj.target/bson/ext/bson.o: ../ext/bson.cc \
-  /home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8.h \
-  /home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8stdint.h \
-  /home/vagrant/.node-gyp/0.10.24/src/node.h \
-  /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv.h \
-  /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-unix.h \
-  /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/ngx-queue.h \
-  /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-linux.h \
-  /home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h \
-  /home/vagrant/.node-gyp/0.10.24/src/node.h \
-  /home/vagrant/.node-gyp/0.10.24/src/node_version.h \
-  /home/vagrant/.node-gyp/0.10.24/src/node_buffer.h ../ext/bson.h \
-  /home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h ../ext/nan.h
-../ext/bson.cc:
-/home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8.h:
-/home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8stdint.h:
-/home/vagrant/.node-gyp/0.10.24/src/node.h:
-/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv.h:
-/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-unix.h:
-/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/ngx-queue.h:
-/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-linux.h:
-/home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h:
-/home/vagrant/.node-gyp/0.10.24/src/node.h:
-/home/vagrant/.node-gyp/0.10.24/src/node_version.h:
-/home/vagrant/.node-gyp/0.10.24/src/node_buffer.h:
-../ext/bson.h:
-/home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h:
-../ext/nan.h:
diff --git a/node_modules/mongodb/node_modules/bson/build/Release/bson.node b/node_modules/mongodb/node_modules/bson/build/Release/bson.node
deleted file mode 100644
index 2db1681b23a8dbe84e4a91f3f3030f04de1e61b1..0000000000000000000000000000000000000000
Binary files a/node_modules/mongodb/node_modules/bson/build/Release/bson.node and /dev/null differ
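The artifacts removed above (the .deps dependency listings and the compiled Release/bson.node addon) are node-gyp build outputs for bson's C++ extension; they are machine- and Node-version-specific and are regenerated by rebuilding the addon (for example with npm rebuild or node-gyp rebuild inside the package), so dropping them from version control loses nothing. As a generic, hedged illustration, not bson's actual loader code, packages that ship such an optional native addon commonly guard the require with a pure-JavaScript fallback along these lines:

    // Generic sketch of the native-addon-with-JS-fallback pattern (paths are hypothetical)
    var bsonImpl;
    try {
      bsonImpl = require('./build/Release/bson');   // compiled C++ addon, if it was built
    } catch (loadError) {
      bsonImpl = require('./lib/bson/bson');        // pure-JavaScript implementation
    }
    module.exports = bsonImpl;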
zLqxp=Dt(+vpRUpus`SMw{SKABUZrnT=})WlJt}>lO8-Emd(W5k<^Wmm`9#!PsM3p6 z`Ye@RuhK&*{c9?HgGzr$r9Y?AUsCD&Rr=ply88lIZ!aM0^%GI=NR@uMO8<&VuTkms zD*bkq{!NwsU6ua0O7B$Zzg6jnRQf+v`cajBdakT@Fp&N>oHX>gN~M>p^a_>UsL~f* z2);_hPa?)kI}zhyqrxYE9Dlze4S6pSAG55#5HVgpB4WIJuJDYDKqH=xCvL+0N&J~* zRT6)Wck_s_&(~G{W5ma?P9bi_I*?e8^&IgYY?mJd`~}`ACW0QJ=m|u~sURZG+@|u| zRsKem|B^~SsM0@C=|cufd4)vCpF)JZ+f;hHN`FYDze|K&{sp8xJVQXk9;Xu#zxee| z+T%(h_@)Ed57&|gUp*0gw-Lej10ve9iHQ2Q6H)(RmEQAWnVw4o-$+GICW5b(_#MQ5 zML$lw7V#PAnqpbMCOr%D5^=U=y-&O!dS3!W`FtYe77h$Dfdd!4S1)SH1wIF=w>4NCC2oh;(C+x)0Xuk;;&#g zVh7%JXFlY-Li!oYdYkw>?8o%2mh}&n|8J&01$~AheH*SDh$weDkm+ZW-i~&w^h;Ix zDAM<0o>A!ml|Gg9gBUL=y+Wne5r2Yp9MfOGx{7os#wlskcQ0w^`4sV+`0gJO{r9@U zKNHb^{~=DpdxOJ(&so-~MDU%j@KPf9W)Q)*OkoU2JKjwie*7aM{P<}i{P+bT(5!c^DlzSS;a$8BG++HHeC5R~3a|96eodKj@4kChosKSxVhu;-l<2z{?0&PTjabS1F{{Yreqvi1?T zVEs>g-Lm=>03p{8q@IIGqg`W(XxBAFwCh?T^jts${|X@aZzm1@bwuz#Oa%XCBKV(G z=otkX{c<`H{W6FMJB?E~Q{hq~?ENhw+ViMN->uT$C4L+4>|F+Y2;-CpeJ%nDJClY! zMMUURPJ}-5nUDTxB0}%w3Rf{7{qYUrt60YqQSWO+=Kif|i2BNiNUtWM-ew@> z-Ao$w#)+u+ej@7KO@!Qk64Bn%3ndO#c!k253U5%jj0pc&sql7%YlyJxI)&?r@RtV^ z{R@RJ5r1S^eP-Npc+_ZNxK?_46}$HLf!{%#CSqKa5n=Z`h+E)?#J!leh>cjc6VZ>Cj03)IS=SQ};9V?Y zJ?0A{>=h@%UmqkwpI<5L|7Fl9e*qEtT?V9nSCC$T`ib{hR*<+J@q~!}zl#VtKP5)d z4&v?bH90JB5E%cvxYdBJiW1Pgi(05q>#H(IXX3R5+ao|Ewg! z&do&Vb-SX!MugtqCPKgaiID#w@d5M$&^6q$en*5|UL|6_I>3C4=eLRA|Cs6EbB~vN zrzq?TB;T1t@Z~GKj0pJ{dO}_f@j1)tr|4cp$UBpWde2ko7Xc{`;ZEe2sC<}I$Uzk% z|9X|*pz^~iA59net5yEJD*yW`f1}DDNQ7M`5wDT&B~t$g`JmU`MCkP$BJ9EP)ayZ& z|Afj%lSDq279#&QD*u4W|Fg<}N9BJ+L_MDo(XT?kCR_)&AP2a?Es@_>CN1m&eex*> z`XKy>`rziGzAq7HSeC$Y#B1hHk?-kqU0F>U<(r8pPy4Vvw*uLoJ5~O-RQ~r={ts3D zk5&FNDu1iW-=XraBhJQsdZvF>ev^szyqM|GD?q#!>sBI|7ZKqU3yE`2yj-m+_kX*d zI@CjZEhoZWEkxL>4M=2d{%ys+5Jn*2c+*&c7}47linns^RPa+j^+L}uHT4{ z=D{0x5TM&=MA^cByG zUdFf~U5|02(w`*mf*j%#kVCA(I@}F}zPUu`e;M&xxKBuY1=n9H{bu4XEbE&@=)>;= ziR&*#zozJq6z%oM{Bwxl=eK}q-z$iZV7;u;7pU~5MCft1N`F`(zstjNyNKwQH;D5r ztCtt}ZCo!9G5_TgQBSGD>xhuIfLM)wQgoDv^4}+d{}&2hRCthxb{I)DCA&VtYbdP|Ae?t z###4nHBK=eW-|R%*n#-4WvwP+9B@A1IDm>`9K1!m0nZbO7zg|$ImgkDiLYT@MTCBT zAwvJN=&#@(Pu!3D;zZ~hB39vgmI%GyBi@7i7(_JsbowF2M*$J(0V3>It>}=V*AmeW zKUB2vXV7~T{l3cQH}+{SzN60Z!EfTT+~q2LG7)+#Qt4k)>Gu#9;JbG!{nsk}Rh7=~ zGPvgPmK&0LDGvt4X2zrd7 zrxI^O{8IE%BJA>YMQ>E}bBcaR(H|%}i+)!N`x4Pl`9#z+QKiozw&A$|5&pQ82!H%8 z5&Vx37h)YnL^}==QU0Gq$n|6c;SYWy(nk`Z-((=;^jy-g(@n&kxb7yR+>eOx%bx?u zw~aLT_7Qhl)<1~g%Q;2z;dKmp2&R zF8GR=SKpA|rgUL2pQF+*B>qv2=RaYcMEV8%N4!|AFW*Exq@hedA|}!PL=0k>$#VT! z2(Ul_$I%i z2dnW{rtlVpk1Bjap%|YiHziBTyG3CeknP>1X#UijXuqQ0SG2!}EH?zme6fE5@>-dG zKFX{7r&RtSMZd4;e(+KE1z5Kdb8+2BywI|C5-&n~h>+)kttnsZrx*mk1MMCR`x1ws YUx?s)A4t9d^k>lL0ZC6G4S)In0H_>NJ^%m! diff --git a/node_modules/mongodb/node_modules/bson/build/bson.target.mk b/node_modules/mongodb/node_modules/bson/build/bson.target.mk deleted file mode 100644 index 47d5b758e..000000000 --- a/node_modules/mongodb/node_modules/bson/build/bson.target.mk +++ /dev/null @@ -1,126 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := bson -DEFS_Debug := \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' \ - '-DDEBUG' \ - '-D_DEBUG' - -# Flags passed to all source files. -CFLAGS_Debug := \ - -Wall \ - -Wextra \ - -Wno-unused-parameter \ - -pthread \ - -m32 \ - -g \ - -O0 - -# Flags passed to only C files. -CFLAGS_C_Debug := - -# Flags passed to only C++ files. 
-CFLAGS_CC_Debug := \ - -fno-rtti - -INCS_Debug := \ - -I/home/vagrant/.node-gyp/0.10.24/src \ - -I/home/vagrant/.node-gyp/0.10.24/deps/uv/include \ - -I/home/vagrant/.node-gyp/0.10.24/deps/v8/include - -DEFS_Release := \ - '-D_LARGEFILE_SOURCE' \ - '-D_FILE_OFFSET_BITS=64' \ - '-DBUILDING_NODE_EXTENSION' - -# Flags passed to all source files. -CFLAGS_Release := \ - -Wall \ - -Wextra \ - -Wno-unused-parameter \ - -pthread \ - -m32 \ - -O2 \ - -fno-strict-aliasing \ - -fno-tree-vrp \ - -fno-omit-frame-pointer - -# Flags passed to only C files. -CFLAGS_C_Release := - -# Flags passed to only C++ files. -CFLAGS_CC_Release := \ - -fno-rtti - -INCS_Release := \ - -I/home/vagrant/.node-gyp/0.10.24/src \ - -I/home/vagrant/.node-gyp/0.10.24/deps/uv/include \ - -I/home/vagrant/.node-gyp/0.10.24/deps/v8/include - -OBJS := \ - $(obj).target/$(TARGET)/ext/bson.o - -# Add to the list of files we specially track dependencies for. -all_deps += $(OBJS) - -# CFLAGS et al overrides must be target-local. -# See "Target-specific Variable Values" in the GNU Make manual. -$(OBJS): TOOLSET := $(TOOLSET) -$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) -$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) - -# Suffix rules, putting all outputs into $(obj). - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# Try building from generated source, too. - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) - -# End of this set of suffix rules -### Rules for final target. -LDFLAGS_Debug := \ - -pthread \ - -rdynamic \ - -m32 - -LDFLAGS_Release := \ - -pthread \ - -rdynamic \ - -m32 - -LIBS := - -$(obj).target/bson.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(obj).target/bson.node: LIBS := $(LIBS) -$(obj).target/bson.node: TOOLSET := $(TOOLSET) -$(obj).target/bson.node: $(OBJS) FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(obj).target/bson.node -# Add target alias -.PHONY: bson -bson: $(builddir)/bson.node - -# Copy this to the executable output path. -$(builddir)/bson.node: TOOLSET := $(TOOLSET) -$(builddir)/bson.node: $(obj).target/bson.node FORCE_DO_CMD - $(call do_cmd,copy) - -all_deps += $(builddir)/bson.node -# Short alias for building this executable. -.PHONY: bson.node -bson.node: $(obj).target/bson.node $(builddir)/bson.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/bson.node - diff --git a/node_modules/mongodb/node_modules/bson/build/config.gypi b/node_modules/mongodb/node_modules/bson/build/config.gypi deleted file mode 100644 index 91ce77280..000000000 --- a/node_modules/mongodb/node_modules/bson/build/config.gypi +++ /dev/null @@ -1,115 +0,0 @@ -# Do not edit. 
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "clang": 0, - "gcc_version": 46, - "host_arch": "ia32", - "node_install_npm": "true", - "node_prefix": "/usr/local", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_v8": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_unsafe_optimizations": 0, - "node_use_dtrace": "false", - "node_use_etw": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_systemtap": "false", - "python": "/usr/bin/python", - "target_arch": "ia32", - "v8_enable_gdbjit": 0, - "v8_no_strict_aliasing": 1, - "v8_use_snapshot": "true", - "nodedir": "/home/vagrant/.node-gyp/0.10.24", - "copy_dev_lib": "true", - "standalone_static_library": 1, - "cache_lock_stale": "60000", - "sign_git_tag": "", - "always_auth": "", - "user_agent": "node/v0.10.24 linux ia32", - "bin_links": "true", - "key": "", - "description": "true", - "fetch_retries": "2", - "heading": "npm", - "user": "", - "force": "", - "cache_min": "10", - "init_license": "ISC", - "editor": "vi", - "rollback": "true", - "cache_max": "null", - "userconfig": "/home/vagrant/.npmrc", - "engine_strict": "", - "init_author_name": "", - "init_author_url": "", - "tmp": "/home/vagrant/tmp", - "depth": "null", - "save_dev": "", - "usage": "", - "https_proxy": "", - "onload_script": "", - "rebuild_bundle": "true", - "save_bundle": "", - "shell": "/bin/bash", - "prefix": "/usr/local", - "registry": "https://registry.npmjs.org/", - "browser": "", - "cache_lock_wait": "10000", - "save_optional": "", - "searchopts": "", - "versions": "", - "cache": "/home/vagrant/.npm", - "ignore_scripts": "", - "searchsort": "name", - "version": "", - "local_address": "", - "viewer": "man", - "color": "true", - "fetch_retry_mintimeout": "10000", - "umask": "18", - "fetch_retry_maxtimeout": "60000", - "message": "%s", - "cert": "", - "global": "", - "link": "", - "save": "", - "unicode": "true", - "long": "", - "production": "", - "unsafe_perm": "true", - "node_version": "v0.10.24", - "tag": "latest", - "git_tag_version": "true", - "shrinkwrap": "true", - "fetch_retry_factor": "10", - "npat": "", - "proprietary_attribs": "true", - "strict_ssl": "true", - "username": "", - "dev": "", - "globalconfig": "/usr/local/etc/npmrc", - "init_module": "/home/vagrant/.npm-init.js", - "parseable": "", - "globalignorefile": "/usr/local/etc/npmignore", - "cache_lock_retries": "10", - "group": "1000", - "init_author_email": "", - "searchexclude": "", - "git": "git", - "optional": "true", - "email": "", - "json": "" - } -} diff --git a/node_modules/mongodb/node_modules/bson/package.json b/node_modules/mongodb/node_modules/bson/package.json deleted file mode 100644 index 4038b4154..000000000 --- a/node_modules/mongodb/node_modules/bson/package.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "name": "bson", - "description": "A bson parser for node.js and the browser", - "keywords": [ - "mongodb", - "bson", - "parser" - ], - "version": "0.2.5", - "author": { - "name": "Christian Amor Kvalheim", - "email": "christkv@gmail.com" - }, - "contributors": [], - "repository": { - "type": "git", - "url": "git://github.com/mongodb/js-bson.git" - }, - "bugs": { - "url": "https://github.com/mongodb/js-bson/issues" - }, - "devDependencies": { - "nodeunit": "0.8.2", - "gleak": "0.2.3", - 
"one": "2.X.X" - }, - "config": { - "native": false - }, - "main": "./lib/bson/index", - "directories": { - "lib": "./lib/bson" - }, - "engines": { - "node": ">=0.6.19" - }, - "scripts": { - "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", - "test": "nodeunit ./test/node && TEST_NATIVE=TRUE nodeunit ./test/node" - }, - "browser": "lib/bson/bson.js", - "licenses": [ - { - "type": "Apache License, Version 2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0" - } - ], - "readme": "Javascript + C++ BSON parser\n============================\n\nThis BSON parser is primarily meant for usage with the `mongodb` node.js driver. However thanks to such wonderful tools at `onejs` we are able to package up a BSON parser that will work in the browser aswell. The current build is located in the `browser_build/bson.js` file.\n\nA simple example on how to use it\n\n \n \n \n \n \n \n\n It's got two simple methods to use in your application.\n\n * BSON.serialize(object, checkKeys, asBuffer, serializeFunctions)\n * @param {Object} object the Javascript object to serialize.\n * @param {Boolean} checkKeys the serializer will check if keys are valid.\n * @param {Boolean} asBuffer return the serialized object as a Buffer object **(ignore)**.\n * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**\n * @return {TypedArray/Array} returns a TypedArray or Array depending on what your browser supports\n \n * BSON.deserialize(buffer, options, isArray)\n * Options\n * **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.\n * **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.\n * **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.\n * @param {TypedArray/Array} a TypedArray/Array containing the BSON data\n * @param {Object} [options] additional options used for the deserialization.\n * @param {Boolean} [isArray] ignore used for recursive parsing.\n * @return {Object} returns the deserialized Javascript Object.\n", - "readmeFilename": "README.md", - "homepage": "https://github.com/mongodb/js-bson", - "_id": "bson@0.2.5", - "dist": { - "shasum": "1181da970e78992c777ee0006559dcc3b3178c4b" - }, - "_from": "bson@0.2.5", - "_resolved": "https://registry.npmjs.org/bson/-/bson-0.2.5.tgz" -} diff --git a/node_modules/mongodb/node_modules/kerberos/build/Makefile b/node_modules/mongodb/node_modules/kerberos/build/Makefile deleted file mode 100644 index 5d8753212..000000000 --- a/node_modules/mongodb/node_modules/kerberos/build/Makefile +++ /dev/null @@ -1,332 +0,0 @@ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := .. -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= . - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= Release - -# Directory all our build output goes into. 
-# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - - - -CC.target ?= $(CC) -CFLAGS.target ?= $(CFLAGS) -CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CXXFLAGS) -LINK.target ?= $(LINK) -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -# -# Note: flock is used to seralize linking. Linking is a memory-intensive -# process so running parallel links can often lead to thrashing. To disable -# the serialization, override LINK via an envrionment variable as follows: -# -# export LINK=g++ -# -# This will allow make to invoke N linker processes as specified in -jN. -LINK ?= flock $(builddir)/linker.lock $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= gcc -CFLAGS.host ?= -CXX.host ?= g++ -CXXFLAGS.host ?= -LINK.host ?= $(CXX.host) -LDFLAGS.host ?= -AR.host ?= ar - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),?,$1) -unreplace_spaces = $(subst ?,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters. -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. 
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef - -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = rm -rf "$@" && cp -af "$<" "$@" - -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) - - -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' - -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. 
-# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain ? instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\ - for p in $(POSTBUILDS); do\ - eval $$p;\ - E=$$?;\ - if [ $$E -ne 0 ]; then\ - break;\ - fi;\ - done;\ - if [ $$E -ne 0 ]; then\ - rm -rf "$@";\ - exit $$E;\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains ? for -# spaces already and dirx strips the ? characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 1,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "all" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: all -all: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -TOOLSET := target -# Suffix rules, putting all outputs into $(obj). -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -# Try building from generated source, too. 
-$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - -$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD - @$(call do_cmd,cxx,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD - @$(call do_cmd,cc,1) -$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD - @$(call do_cmd,cc,1) - - -ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ - $(findstring $(join ^,$(prefix)),\ - $(join ^,kerberos.target.mk)))),) - include kerberos.target.mk -endif - -quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/home/vagrant/lab4/node_modules/mongodb/node_modules/kerberos/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/home/vagrant/.node-gyp/0.10.24/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/home/vagrant/.node-gyp/0.10.24" "-Dmodule_root_dir=/home/vagrant/lab4/node_modules/mongodb/node_modules/kerberos" binding.gyp -Makefile: $(srcdir)/../../../../../.node-gyp/0.10.24/common.gypi $(srcdir)/../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp - $(call do_cmd,regen_makefile) - -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. 
Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
diff --git a/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d b/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d
deleted file mode 100644
index 0bc320607..000000000
--- a/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d
+++ /dev/null
@@ -1 +0,0 @@
-cmd_Release/kerberos.node := rm -rf "Release/kerberos.node" && cp -af "Release/obj.target/kerberos.node" "Release/kerberos.node"
diff --git a/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d b/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d
deleted file mode 100644
index ba6dec80c..000000000
--- a/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d
+++ /dev/null
@@ -1 +0,0 @@
-cmd_Release/obj.target/kerberos.node := flock ./Release/linker.lock g++ -shared -pthread -rdynamic -m32 -Wl,-soname=kerberos.node -o Release/obj.target/kerberos.node -Wl,--start-group -Wl,--end-group
diff --git a/node_modules/mongodb/node_modules/kerberos/build/Release/kerberos.node b/node_modules/mongodb/node_modules/kerberos/build/Release/kerberos.node
deleted file mode 100644
index 5d872403f814739cf4d6d7459d9c5872bbd73c14..0000000000000000000000000000000000000000
GIT binary patch
[... base85-encoded literal data omitted (6608-byte kerberos.node binary) ...]
diff --git a/node_modules/mongodb/node_modules/kerberos/build/Release/linker.lock b/node_modules/mongodb/node_modules/kerberos/build/Release/linker.lock
deleted file mode 100644
index e69de29bb..000000000
diff --git a/node_modules/mongodb/node_modules/kerberos/build/Release/obj.target/kerberos.node b/node_modules/mongodb/node_modules/kerberos/build/Release/obj.target/kerberos.node
deleted file mode 100644
index 5d872403f814739cf4d6d7459d9c5872bbd73c14..0000000000000000000000000000000000000000
GIT binary patch
[... base85-encoded literal data omitted (6608-byte obj.target/kerberos.node binary) ...]
diff --git a/node_modules/mongodb/node_modules/kerberos/build/binding.Makefile b/node_modules/mongodb/node_modules/kerberos/build/binding.Makefile
deleted file mode 100644
index d0d9c64a7..000000000
--- a/node_modules/mongodb/node_modules/kerberos/build/binding.Makefile
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is generated by gyp; do not edit.
-
-export builddir_name ?= build/./.
-.PHONY: all
-all:
-	$(MAKE) kerberos
diff --git a/node_modules/mongodb/node_modules/kerberos/build/config.gypi b/node_modules/mongodb/node_modules/kerberos/build/config.gypi
deleted file mode 100644
index 91ce77280..000000000
--- a/node_modules/mongodb/node_modules/kerberos/build/config.gypi
+++ /dev/null
@@ -1,115 +0,0 @@
-# Do not edit.
File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "clang": 0, - "gcc_version": 46, - "host_arch": "ia32", - "node_install_npm": "true", - "node_prefix": "/usr/local", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_v8": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_unsafe_optimizations": 0, - "node_use_dtrace": "false", - "node_use_etw": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_systemtap": "false", - "python": "/usr/bin/python", - "target_arch": "ia32", - "v8_enable_gdbjit": 0, - "v8_no_strict_aliasing": 1, - "v8_use_snapshot": "true", - "nodedir": "/home/vagrant/.node-gyp/0.10.24", - "copy_dev_lib": "true", - "standalone_static_library": 1, - "cache_lock_stale": "60000", - "sign_git_tag": "", - "always_auth": "", - "user_agent": "node/v0.10.24 linux ia32", - "bin_links": "true", - "key": "", - "description": "true", - "fetch_retries": "2", - "heading": "npm", - "user": "", - "force": "", - "cache_min": "10", - "init_license": "ISC", - "editor": "vi", - "rollback": "true", - "cache_max": "null", - "userconfig": "/home/vagrant/.npmrc", - "engine_strict": "", - "init_author_name": "", - "init_author_url": "", - "tmp": "/home/vagrant/tmp", - "depth": "null", - "save_dev": "", - "usage": "", - "https_proxy": "", - "onload_script": "", - "rebuild_bundle": "true", - "save_bundle": "", - "shell": "/bin/bash", - "prefix": "/usr/local", - "registry": "https://registry.npmjs.org/", - "browser": "", - "cache_lock_wait": "10000", - "save_optional": "", - "searchopts": "", - "versions": "", - "cache": "/home/vagrant/.npm", - "ignore_scripts": "", - "searchsort": "name", - "version": "", - "local_address": "", - "viewer": "man", - "color": "true", - "fetch_retry_mintimeout": "10000", - "umask": "18", - "fetch_retry_maxtimeout": "60000", - "message": "%s", - "cert": "", - "global": "", - "link": "", - "save": "", - "unicode": "true", - "long": "", - "production": "", - "unsafe_perm": "true", - "node_version": "v0.10.24", - "tag": "latest", - "git_tag_version": "true", - "shrinkwrap": "true", - "fetch_retry_factor": "10", - "npat": "", - "proprietary_attribs": "true", - "strict_ssl": "true", - "username": "", - "dev": "", - "globalconfig": "/usr/local/etc/npmrc", - "init_module": "/home/vagrant/.npm-init.js", - "parseable": "", - "globalignorefile": "/usr/local/etc/npmignore", - "cache_lock_retries": "10", - "group": "1000", - "init_author_email": "", - "searchexclude": "", - "git": "git", - "optional": "true", - "email": "", - "json": "" - } -} diff --git a/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk b/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk deleted file mode 100644 index d2ce49b92..000000000 --- a/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk +++ /dev/null @@ -1,42 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := kerberos -### Rules for final target. 
-LDFLAGS_Debug := \ - -pthread \ - -rdynamic \ - -m32 - -LDFLAGS_Release := \ - -pthread \ - -rdynamic \ - -m32 - -LIBS := - -$(obj).target/kerberos.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(obj).target/kerberos.node: LIBS := $(LIBS) -$(obj).target/kerberos.node: TOOLSET := $(TOOLSET) -$(obj).target/kerberos.node: FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(obj).target/kerberos.node -# Add target alias -.PHONY: kerberos -kerberos: $(builddir)/kerberos.node - -# Copy this to the executable output path. -$(builddir)/kerberos.node: TOOLSET := $(TOOLSET) -$(builddir)/kerberos.node: $(obj).target/kerberos.node FORCE_DO_CMD - $(call do_cmd,copy) - -all_deps += $(builddir)/kerberos.node -# Short alias for building this executable. -.PHONY: kerberos.node -kerberos.node: $(obj).target/kerberos.node $(builddir)/kerberos.node - -# Add executable to "all" target. -.PHONY: all -all: $(builddir)/kerberos.node - diff --git a/node_modules/mongodb/node_modules/kerberos/package.json b/node_modules/mongodb/node_modules/kerberos/package.json deleted file mode 100644 index b29cfc2c8..000000000 --- a/node_modules/mongodb/node_modules/kerberos/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "kerberos", - "version": "0.0.3", - "description": "Kerberos library for Node.js", - "main": "index.js", - "scripts": { - "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", - "test": "nodeunit ./test" - }, - "repository": { - "type": "git", - "url": "https://github.com/christkv/kerberos.git" - }, - "keywords": [ - "kerberos", - "security", - "authentication" - ], - "devDependencies": { - "nodeunit": "latest" - }, - "author": { - "name": "Christian Amor Kvalheim" - }, - "license": "Apache 2.0", - "readmeFilename": "README.md", - "gitHead": "bb01d4fe322e022999aca19da564e7d9db59a8ed", - "readme": "kerberos\n========\n\nKerberos library for node.js", - "bugs": { - "url": "https://github.com/christkv/kerberos/issues" - }, - "homepage": "https://github.com/christkv/kerberos", - "_id": "kerberos@0.0.3", - "dist": { - "shasum": "53d6ed947fa0d8306a67680dffb4bd32a51e3839" - }, - "_from": "kerberos@0.0.3", - "_resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz" -} diff --git a/node_modules/mongodb/package.json b/node_modules/mongodb/package.json old mode 100644 new mode 100755 index 4c4244e9f..1aa66f04a --- a/node_modules/mongodb/package.json +++ b/node_modules/mongodb/package.json @@ -1,17 +1,39 @@ { - "name": "mongodb", - "description": "A node.js driver for MongoDB", - "keywords": [ - "mongodb", - "mongo", - "driver", - "db" + "_from": "mongodb@1.3.23", + "_id": "mongodb@1.3.23", + "_inBundle": false, + "_integrity": "sha1-h0pSEhYrFhiK7q7l4GBndmyOnoY=", + "_location": "/mongodb", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "mongodb@1.3.23", + "name": "mongodb", + "escapedName": "mongodb", + "rawSpec": "1.3.23", + "saveSpec": null, + "fetchSpec": "1.3.23" + }, + "_requiredBy": [ + "/", + "/mongoose" ], - "version": "1.3.23", + "_resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.23.tgz", + "_shasum": "874a5212162b16188aeeaee5e06067766c8e9e86", + "_spec": "mongodb@1.3.23", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton", "author": { "name": "Christian Amor Kvalheim", "email": "christkv@gmail.com" }, + "bugs": { + "url": "http://github.com/mongodb/node-mongodb-native/issues" + }, + "bundleDependencies": false, + "config": { + "native": false + }, "contributors": [ { "name": "Aaron Heckmann" @@ -173,55 
+195,55 @@ "name": "Gregory Langlais" } ], - "repository": { - "type": "git", - "url": "http://github.com/mongodb/node-mongodb-native.git" - }, - "bugs": { - "url": "http://github.com/mongodb/node-mongodb-native/issues" - }, "dependencies": { "bson": "0.2.5", "kerberos": "0.0.3" }, + "deprecated": "Please upgrade to 2.2.19 or higher", + "description": "A node.js driver for MongoDB", "devDependencies": { + "async": "0.1.22", "dox": "0.4.4", - "uglify-js": "1.2.5", "ejs": "0.6.1", - "request": "2.12.0", - "nodeunit": "0.7.4", - "markdown": "0.3.1", "gleak": "0.2.3", - "step": "0.0.5", - "async": "0.1.22", "integra": "latest", - "optimist": "latest" - }, - "optionalDependencies": { - "kerberos": "0.0.3" - }, - "config": { - "native": false + "markdown": "0.3.1", + "nodeunit": "0.7.4", + "optimist": "latest", + "request": "2.12.0", + "step": "0.0.5", + "uglify-js": "1.2.5" }, - "main": "./lib/mongodb/index", - "homepage": "http://mongodb.github.com/node-mongodb-native/", "directories": { "lib": "./lib/mongodb" }, "engines": { "node": ">=0.6.19" }, - "scripts": { - "test": "make test_functional" - }, + "homepage": "http://mongodb.github.com/node-mongodb-native/", + "keywords": [ + "mongodb", + "mongo", + "driver", + "db" + ], "licenses": [ { "type": "Apache License, Version 2.0", "url": "http://www.apache.org/licenses/LICENSE-2.0" } ], - "readme": "## MongoDB Node.JS Driver\n \n| what | where |\n|---------------|------------------------------------------------|\n| documentation | http://mongodb.github.io/node-mongodb-native/ |\n| apidoc | http://mongodb.github.io/node-mongodb-native/ |\n| source | https://github.com/mongodb/node-mongodb-native |\n| mongodb | http://www.mongodb.org/ |\n\n### Bugs / Feature Requests\n\nThink you’ve found a bug? Want to see a new feature in PyMongo? Please open a\ncase in our issue management tool, JIRA:\n\n- Create an account and login .\n- Navigate to the NODE project .\n- Click **Create Issue** - Please provide as much information as possible about the issue type and how to reproduce it.\n\nBug reports in JIRA for all driver projects (i.e. NODE, PYTHON, CSHARP, JAVA) and the\nCore Server (i.e. SERVER) project are **public**.\n\n### Questions and Bug Reports\n\n * mailing list: https://groups.google.com/forum/#!forum/node-mongodb-native\n * jira: http://jira.mongodb.org/\n\n### Change Log\n\nhttp://jira.mongodb.org/browse/NODE\n\n## Install\n\nTo install the most recent release from npm, run:\n\n npm install mongodb\n\nThat may give you a warning telling you that bugs['web'] should be bugs['url'], it would be safe to ignore it (this has been fixed in the development version)\n\nTo install the latest from the repository, run::\n\n npm install path/to/node-mongodb-native\n\n## Live Examples\n\n\n## Introduction\n\nThis is a node.js driver for MongoDB. 
It's a port (or close to a port) of the library for ruby at http://github.com/mongodb/mongo-ruby-driver/.\n\nA simple example of inserting a document.\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format;\n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n var collection = db.collection('test_insert');\n collection.insert({a:2}, function(err, docs) {\n \n collection.count(function(err, count) {\n console.log(format(\"count = %s\", count));\n });\n\n // Locate all the entries using find\n collection.find().toArray(function(err, results) {\n console.dir(results);\n // Let's close the db\n db.close();\n });\n });\n })\n```\n\n## Data types\n\nTo store and retrieve the non-JSON MongoDb primitives ([ObjectID](http://www.mongodb.org/display/DOCS/Object+IDs), Long, Binary, [Timestamp](http://www.mongodb.org/display/DOCS/Timestamp+data+type), [DBRef](http://www.mongodb.org/display/DOCS/Database+References#DatabaseReferences-DBRef), Code).\n\nIn particular, every document has a unique `_id` which can be almost any type, and by default a 12-byte ObjectID is created. ObjectIDs can be represented as 24-digit hexadecimal strings, but you must convert the string back into an ObjectID before you can use it in the database. For example:\n\n```javascript\n // Get the objectID type\n var ObjectID = require('mongodb').ObjectID;\n\n var idString = '4e4e1638c85e808431000003';\n collection.findOne({_id: new ObjectID(idString)}, console.log) // ok\n collection.findOne({_id: idString}, console.log) // wrong! callback gets undefined\n```\n\nHere are the constructors the non-Javascript BSON primitive types:\n\n```javascript\n // Fetch the library\n var mongo = require('mongodb');\n // Create new instances of BSON types\n new mongo.Long(numberString)\n new mongo.ObjectID(hexString)\n new mongo.Timestamp() // the actual unique number is generated on insert.\n new mongo.DBRef(collectionName, id, dbName)\n new mongo.Binary(buffer) // takes a string or Buffer\n new mongo.Code(code, [context])\n new mongo.Symbol(string)\n new mongo.MinKey()\n new mongo.MaxKey()\n new mongo.Double(number)\t// Force double storage\n```\n\n### The C/C++ bson parser/serializer\n\nIf you are running a version of this library has the C/C++ parser compiled, to enable the driver to use the C/C++ bson parser pass it the option native_parser:true like below\n\n```javascript\n // using native_parser:\n MongoClient.connect('mongodb://127.0.0.1:27017/test'\n , {db: {native_parser: true}}, function(err, db) {})\n```\n\nThe C++ parser uses the js objects both for serialization and deserialization.\n\n## GitHub information\n\nThe source code is available at http://github.com/mongodb/node-mongodb-native.\nYou can either clone the repository or download a tarball of the latest release.\n\nOnce you have the source you can test the driver by running\n\n $ make test\n\nin the main directory. You will need to have a mongo instance running on localhost for the integration tests to pass.\n\n## Examples\n\nFor examples look in the examples/ directory. 
You can execute the examples using node.\n\n $ cd examples\n $ node queries.js\n\n## GridStore\n\nThe GridStore class allows for storage of binary files in mongoDB using the mongoDB defined files and chunks collection definition.\n\nFor more information have a look at [Gridstore](https://github.com/mongodb/node-mongodb-native/blob/master/docs/gridfs.md)\n\n## Replicasets\n\nFor more information about how to connect to a replicaset have a look at the extensive documentation [Documentation](http://mongodb.github.com/node-mongodb-native/)\n\n### Primary Key Factories\n\nDefining your own primary key factory allows you to generate your own series of id's\n(this could f.ex be to use something like ISBN numbers). The generated the id needs to be a 12 byte long \"string\".\n\nSimple example below\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n // Custom factory (need to provide a 12 byte array);\n CustomPKFactory = function() {}\n CustomPKFactory.prototype = new Object();\n CustomPKFactory.createPk = function() {\n return new ObjectID(\"aaaaaaaaaaaa\");\n }\n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n db.dropDatabase(function(err, done) {\n \n db.createCollection('test_custom_key', function(err, collection) {\n \n collection.insert({'a':1}, function(err, docs) {\n \n collection.find({'_id':new ObjectID(\"aaaaaaaaaaaa\")}).toArray(function(err, items) {\n console.dir(items);\n // Let's close the db\n db.close();\n });\n });\n });\n });\n });\n```\n\n## Documentation\n\nIf this document doesn't answer your questions, see the source of\n[Collection](https://github.com/mongodb/node-mongodb-native/blob/master/lib/mongodb/collection.js)\nor [Cursor](https://github.com/mongodb/node-mongodb-native/blob/master/lib/mongodb/cursor.js),\nor the documentation at MongoDB for query and update formats.\n\n### Find\n\nThe find method is actually a factory method to create\nCursor objects. A Cursor lazily uses the connection the first time\nyou call `nextObject`, `each`, or `toArray`.\n\nThe basic operation on a cursor is the `nextObject` method\nthat fetches the next matching document from the database. The convenience\nmethods `each` and `toArray` call `nextObject` until the cursor is exhausted.\n\nSignatures:\n\n```javascript\n var cursor = collection.find(query, [fields], options);\n cursor.sort(fields).limit(n).skip(m).\n\n cursor.nextObject(function(err, doc) {});\n cursor.each(function(err, doc) {});\n cursor.toArray(function(err, docs) {});\n\n cursor.rewind() // reset the cursor to its initial state.\n```\n\nUseful chainable methods of cursor. These can optionally be options of `find` instead of method calls:\n\n * `.limit(n).skip(m)` to control paging.\n * `.sort(fields)` Order by the given fields. There are several equivalent syntaxes:\n * `.sort({field1: -1, field2: 1})` descending by field1, then ascending by field2.\n * `.sort([['field1', 'desc'], ['field2', 'asc']])` same as above\n * `.sort([['field1', 'desc'], 'field2'])` same as above\n * `.sort('field1')` ascending by field1\n\nOther options of `find`:\n\n* `fields` the fields to fetch (to avoid transferring the entire document)\n* `tailable` if true, makes the cursor [tailable](http://www.mongodb.org/display/DOCS/Tailable+Cursors).\n* `batchSize` The number of the subset of results to request the database\nto return for every request. 
This should initially be greater than 1 otherwise\nthe database will automatically close the cursor. The batch size can be set to 1\nwith `batchSize(n, function(err){})` after performing the initial query to the database.\n* `hint` See [Optimization: hint](http://www.mongodb.org/display/DOCS/Optimization#Optimization-Hint).\n* `explain` turns this into an explain query. You can also call\n`explain()` on any cursor to fetch the explanation.\n* `snapshot` prevents documents that are updated while the query is active\nfrom being returned multiple times. See more\n[details about query snapshots](http://www.mongodb.org/display/DOCS/How+to+do+Snapshotted+Queries+in+the+Mongo+Database).\n* `timeout` if false, asks MongoDb not to time out this cursor after an\ninactivity period.\n\nFor information on how to create queries, see the\n[MongoDB section on querying](http://www.mongodb.org/display/DOCS/Querying).\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n var collection = db\n .collection('test')\n .find({})\n .limit(10)\n .toArray(function(err, docs) {\n console.dir(docs);\n });\n });\n```\n\n### Insert\n\nSignature:\n\n```javascript\n collection.insert(docs, options, [callback]);\n```\n\nwhere `docs` can be a single document or an array of documents.\n\nUseful options:\n\n* `safe:true` Should always set if you have a callback.\n\nSee also: [MongoDB docs for insert](http://www.mongodb.org/display/DOCS/Inserting).\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n \n db.collection('test').insert({hello: 'world'}, {w:1}, function(err, objects) {\n if (err) console.warn(err.message);\n if (err && err.message.indexOf('E11000 ') !== -1) {\n // this _id was already inserted in the database\n }\n });\n });\n```\n\nNote that there's no reason to pass a callback to the insert or update commands\nunless you use the `safe:true` option. If you don't specify `safe:true`, then\nyour callback will be called immediately.\n\n### Update: update and insert (upsert)\n\nThe update operation will update the first document that matches your query\n(or all documents that match if you use `multi:true`).\nIf `safe:true`, `upsert` is not set, and no documents match, your callback will return 0 documents updated.\n\nSee the [MongoDB docs](http://www.mongodb.org/display/DOCS/Updating) for\nthe modifier (`$inc`, `$set`, `$push`, etc.) formats.\n\nSignature:\n\n```javascript\n collection.update(criteria, objNew, options, [callback]);\n```\n\nUseful options:\n\n* `safe:true` Should always set if you have a callback.\n* `multi:true` If set, all matching documents are updated, not just the first.\n* `upsert:true` Atomically inserts the document if no documents matched.\n\nExample for `update`:\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n db.collection('test').update({hi: 'here'}, {$set: {hi: 'there'}}, {w:1}, function(err) {\n if (err) console.warn(err.message);\n else console.log('successfully updated');\n });\n });\n```\n\n### Find and modify\n\n`findAndModify` is like `update`, but it also gives the updated document to\nyour callback. 
But there are a few key differences between findAndModify and\nupdate:\n\n 1. The signatures differ.\n 2. You can only findAndModify a single item, not multiple items.\n\nSignature:\n\n```javascript\n collection.findAndModify(query, sort, update, options, callback)\n```\n\nThe sort parameter is used to specify which object to operate on, if more than\none document matches. It takes the same format as the cursor sort (see\nConnection.find above).\n\nSee the\n[MongoDB docs for findAndModify](http://www.mongodb.org/display/DOCS/findAndModify+Command)\nfor more details.\n\nUseful options:\n\n* `remove:true` set to a true to remove the object before returning\n* `new:true` set to true if you want to return the modified object rather than the original. Ignored for remove.\n* `upsert:true` Atomically inserts the document if no documents matched.\n\nExample for `findAndModify`:\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n db.collection('test').findAndModify({hello: 'world'}, [['_id','asc']], {$set: {hi: 'there'}}, {}, function(err, object) {\n if (err) console.warn(err.message);\n else console.dir(object); // undefined if no matching object exists.\n });\n });\n```\n\n### Save\n\nThe `save` method is a shorthand for upsert if the document contains an\n`_id`, or an insert if there is no `_id`.\n\n## Release Notes\n\nSee HISTORY\n\n## Credits\n\n1. [10gen](http://github.com/mongodb/mongo-ruby-driver/)\n2. [Google Closure Library](http://code.google.com/closure/library/)\n3. [Jonas Raoni Soares Silva](http://jsfromhell.com/classes/binary-parser)\n\n## Contributors\n\nAaron Heckmann, Christoph Pojer, Pau Ramon Revilla, Nathan White, Emmerman, Seth LaForge, Boris Filipov, Stefan Schärmeli, Tedde Lundgren, renctan, Sergey Ukustov, Ciaran Jessup, kuno, srimonti, Erik Abele, Pratik Daga, Slobodan Utvic, Kristina Chodorow, Yonathan Randolph, Brian Noguchi, Sam Epstein, James Harrison Fisher, Vladimir Dronnikov, Ben Hockey, Henrik Johansson, Simon Weare, Alex Gorbatchev, Shimon Doodkin, Kyle Mueller, Eran Hammer-Lahav, Marcin Ciszak, François de Metz, Vinay Pulim, nstielau, Adam Wiggins, entrinzikyl, Jeremy Selier, Ian Millington, Public Keating, andrewjstone, Christopher Stott, Corey Jewett, brettkiefer, Rob Holland, Senmiao Liu, heroic, gitfy\n\n## License\n\n Copyright 2009 - 2013 MongoDb Inc.\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n", - "readmeFilename": "Readme.md", - "_id": "mongodb@1.3.23", - "_from": "mongodb@*" + "main": "./lib/mongodb/index", + "name": "mongodb", + "optionalDependencies": { + "kerberos": "0.0.3" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/mongodb/node-mongodb-native.git" + }, + "scripts": { + "test": "make test_functional" + }, + "version": "1.3.23" } diff --git a/node_modules/mongoose/contRun.sh b/node_modules/mongoose/contRun.sh old mode 100644 new mode 100755 diff --git 
a/node_modules/mongoose/node_modules/hooks/package.json b/node_modules/mongoose/node_modules/hooks/package.json deleted file mode 100644 index 561ccc9fd..000000000 --- a/node_modules/mongoose/node_modules/hooks/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "hooks", - "description": "Adds pre and post hook functionality to your JavaScript methods.", - "version": "0.2.1", - "keywords": [ - "node", - "hooks", - "middleware", - "pre", - "post" - ], - "homepage": "https://github.com/bnoguchi/hooks-js/", - "repository": { - "type": "git", - "url": "git://github.com/bnoguchi/hooks-js.git" - }, - "author": { - "name": "Brian Noguchi", - "email": "brian.noguchi@gmail.com", - "url": "https://github.com/bnoguchi/" - }, - "main": "./hooks.js", - "directories": { - "lib": "." - }, - "scripts": { - "test": "make test" - }, - "dependencies": {}, - "devDependencies": { - "expresso": ">=0.7.6", - "should": ">=0.2.1", - "underscore": ">=1.1.4" - }, - "engines": { - "node": ">=0.4.0" - }, - "licenses": [ - "MIT" - ], - "optionalDependencies": {}, - "readme": "hooks\n============\n\nAdd pre and post middleware hooks to your JavaScript methods.\n\n## Installation\n npm install hooks\n\n## Motivation\nSuppose you have a JavaScript object with a `save` method.\n\nIt would be nice to be able to declare code that runs before `save` and after `save`.\nFor example, you might want to run validation code before every `save`,\nand you might want to dispatch a job to a background job queue after `save`.\n\nOne might have an urge to hard code this all into `save`, but that turns out to\ncouple all these pieces of functionality (validation, save, and job creation) more\ntightly than is necessary. For example, what if someone does not want to do background\njob creation after the logical save? \n\nIt is nicer to tack on functionality using what we call `pre` and `post` hooks. These\nare functions that you define and that you direct to execute before or after particular\nmethods.\n\n## Example\nWe can use `hooks` to add validation and background jobs in the following way:\n\n var hooks = require('hooks')\n , Document = require('./path/to/some/document/constructor');\n\n // Add hooks' methods: `hook`, `pre`, and `post` \n for (var k in hooks) {\n Document[k] = hooks[k];\n }\n\n // Define a new method that is able to invoke pre and post middleware\n Document.hook('save', Document.prototype.save);\n\n // Define a middleware function to be invoked before 'save'\n Document.pre('save', function validate (next) {\n // The `this` context inside of `pre` and `post` functions\n // is the Document instance\n if (this.isValid()) next(); // next() passes control to the next middleware\n // or to the target method itself\n else next(new Error(\"Invalid\")); // next(error) invokes an error callback\n });\n\n // Define a middleware function to be invoked after 'save'\n Document.post('save', function createJob () {\n this.sendToBackgroundQueue();\n });\n\nIf you already have defined `Document.prototype` methods for which you want pres and posts,\nthen you do not need to explicitly invoke `Document.hook(...)`. Invoking `Document.pre(methodName, fn)`\nor `Document.post(methodName, fn)` will automatically and lazily change `Document.prototype[methodName]`\nso that it plays well with `hooks`. 
An equivalent way to implement the previous example is:\n\n```javascript\nvar hooks = require('hooks')\n , Document = require('./path/to/some/document/constructor');\n\n// Add hooks' methods: `hook`, `pre`, and `post` \nfor (var k in hooks) {\n Document[k] = hooks[k];\n}\n\nDocument.prototype.save = function () {\n // ...\n};\n\n// Define a middleware function to be invoked before 'save'\nDocument.pre('save', function validate (next) {\n // The `this` context inside of `pre` and `post` functions\n // is the Document instance\n if (this.isValid()) next(); // next() passes control to the next middleware\n // or to the target method itself\n else next(new Error(\"Invalid\")); // next(error) invokes an error callback\n});\n\n// Define a middleware function to be invoked after 'save'\nDocument.post('save', function createJob () {\n this.sendToBackgroundQueue();\n});\n```\n\n## Pres and Posts as Middleware\nWe structure pres and posts as middleware to give you maximum flexibility:\n\n1. You can define **multiple** pres (or posts) for a single method.\n2. These pres (or posts) are then executed as a chain of methods.\n3. Any functions in this middleware chain can choose to halt the chain's execution by `next`ing an Error from that middleware function. If this occurs, then none of the other middleware in the chain will execute, and the main method (e.g., `save`) will not execute. This is nice, for example, when we don't want a document to save if it is invalid.\n\n## Defining multiple pres (or posts)\n`pre` is chainable, so you can define multiple pres via:\n Document.pre('save', function (next, halt) {\n console.log(\"hello\");\n }).pre('save', function (next, halt) {\n console.log(\"world\");\n });\n\nAs soon as one pre finishes executing, the next one will be invoked, and so on.\n\n## Error Handling\nYou can define a default error handler by passing a 2nd function as the 3rd argument to `hook`:\n Document.hook('set', function (path, val) {\n this[path] = val;\n }, function (err) {\n // Handler the error here\n console.error(err);\n });\n\nThen, we can pass errors to this handler from a pre or post middleware function:\n Document.pre('set', function (next, path, val) {\n next(new Error());\n });\n\nIf you do not set up a default handler, then `hooks` makes the default handler that just throws the `Error`.\n\nThe default error handler can be over-rided on a per method invocation basis.\n\nIf the main method that you are surrounding with pre and post middleware expects its last argument to be a function\nwith callback signature `function (error, ...)`, then that callback becomes the error handler, over-riding the default\nerror handler you may have set up.\n \n```javascript\nDocument.hook('save', function (callback) {\n // Save logic goes here\n ...\n});\n\nvar doc = new Document();\ndoc.save( function (err, saved) {\n // We can pass err via `next` in any of our pre or post middleware functions\n if (err) console.error(err);\n \n // Rest of callback logic follows ...\n});\n```\n\n## Mutating Arguments via Middleware\n`pre` and `post` middleware can also accept the intended arguments for the method\nthey augment. 
This is useful if you want to mutate the arguments before passing\nthem along to the next middleware and eventually pass a mutated arguments list to\nthe main method itself.\n\nAs a simple example, let's define a method `set` that just sets a key, value pair.\nIf we want to namespace the key, we can do so by adding a `pre` middleware hook\nthat runs before `set`, alters the arguments by namespacing the `key` argument, and passes them onto `set`:\n\n Document.hook('set', function (key, val) {\n this[key] = val;\n });\n Document.pre('set', function (next, key, val) {\n next('namespace-' + key, val);\n });\n var doc = new Document();\n doc.set('hello', 'world');\n console.log(doc.hello); // undefined\n console.log(doc['namespace-hello']); // 'world'\n\nAs you can see above, we pass arguments via `next`.\n\nIf you are not mutating the arguments, then you can pass zero arguments\nto `next`, and the next middleware function will still have access\nto the arguments.\n\n Document.hook('set', function (key, val) {\n this[key] = val;\n });\n Document.pre('set', function (next, key, val) {\n // I have access to key and val here\n next(); // We don't need to pass anything to next\n });\n Document.pre('set', function (next, key, val) {\n // And I still have access to the original key and val here\n next();\n });\n\nFinally, you can add arguments that downstream middleware can also see:\n\n // Note that in the definition of `set`, there is no 3rd argument, options\n Document.hook('set', function (key, val) {\n // But...\n var options = arguments[2]; // ...I have access to an options argument\n // because of pre function pre2 (defined below)\n console.log(options); // '{debug: true}'\n this[key] = val;\n });\n Document.pre('set', function pre1 (next, key, val) {\n // I only have access to key and val arguments\n console.log(arguments.length); // 3\n next(key, val, {debug: true});\n });\n Document.pre('set', function pre2 (next, key, val, options) {\n console.log(arguments.length); // 4\n console.log(options); // '{ debug: true}'\n next();\n });\n Document.pre('set', function pre3 (next, key, val, options) {\n // I still have access to key, val, AND the options argument introduced via the preceding middleware\n console.log(arguments.length); // 4\n console.log(options); // '{ debug: true}'\n next();\n });\n \n var doc = new Document()\n doc.set('hey', 'there');\n\n## Parallel `pre` middleware\n\nAll middleware up to this point has been \"serial\" middleware -- i.e., middleware whose logic\nis executed as a serial chain.\n\nSome scenarios call for parallel middleware -- i.e., middleware that can wait for several\nasynchronous services at once to respond.\n\nFor instance, you may only want to save a Document only after you have checked\nthat the Document is valid according to two different remote services.\n\nWe accomplish asynchronous middleware by adding a second kind of flow control callback\n(the only flow control callback so far has been `next`), called `done`.\n\n- `next` passes control to the next middleware in the chain\n- `done` keeps track of how many parallel middleware have invoked `done` and passes\n control to the target method when ALL parallel middleware have invoked `done`. 
If\n you pass an `Error` to `done`, then the error is handled, and the main method that is\n wrapped by pres and posts will not get invoked.\n\nWe declare pre middleware that is parallel by passing a 3rd boolean argument to our `pre`\ndefinition method.\n\nWe illustrate via the parallel validation example mentioned above:\n\n Document.hook('save', function targetFn (callback) {\n // Save logic goes here\n // ...\n // This only gets run once the two `done`s are both invoked via preOne and preTwo.\n });\n\n // true marks this as parallel middleware\n Document.pre('save', true, function preOne (next, doneOne, callback) {\n remoteServiceOne.validate(this.serialize(), function (err, isValid) {\n // The code in here will probably be run after the `next` below this block\n // and could possibly be run after the console.log(\"Hola\") in `preTwo\n if (err) return doneOne(err);\n if (isValid) doneOne();\n });\n next(); // Pass control to the next middleware\n });\n \n // We will suppose that we need 2 different remote services to validate our document\n Document.pre('save', true, function preTwo (next, doneTwo, callback) {\n remoteServiceTwo.validate(this.serialize(), function (err, isValid) {\n if (err) return doneTwo(err);\n if (isValid) doneTwo();\n });\n next();\n });\n \n // While preOne and preTwo are parallel, preThree is a serial pre middleware\n Document.pre('save', function preThree (next, callback) {\n next();\n });\n \n var doc = new Document();\n doc.save( function (err, doc) {\n // Do stuff with the saved doc here...\n });\n\nIn the above example, flow control may happen in the following way:\n\n(1) doc.save -> (2) preOne --(next)--> (3) preTwo --(next)--> (4) preThree --(next)--> (wait for dones to invoke) -> (5) doneTwo -> (6) doneOne -> (7) targetFn\n\nSo what's happening is that:\n\n1. You call `doc.save(...)`\n2. First, your preOne middleware gets executed. It makes a remote call to the validation service and `next()`s to the preTwo middleware.\n3. Now, your preTwo middleware gets executed. It makes a remote call to another validation service and `next()`s to the preThree middleware.\n4. Your preThree middleware gets executed. It immediately `next()`s. But nothing else gets executing until both `doneOne` and `doneTwo` are invoked inside the callbacks handling the response from the two valiation services.\n5. We will suppose that validation remoteServiceTwo returns a response to us first. In this case, we call `doneTwo` inside the callback to remoteServiceTwo.\n6. Some fractions of a second later, remoteServiceOne returns a response to us. In this case, we call `doneOne` inside the callback to remoteServiceOne.\n7. `hooks` implementation keeps track of how many parallel middleware has been defined per target function. 
It detects that both asynchronous pre middlewares (`preOne` and `preTwo`) have finally called their `done` functions (`doneOne` and `doneTwo`), so the implementation finally invokes our `targetFn` (i.e., our core `save` business logic).\n\n## Removing Pres\n\nYou can remove a particular pre associated with a hook:\n\n Document.pre('set', someFn);\n Document.removePre('set', someFn);\n\nAnd you can also remove all pres associated with a hook:\n Document.removePre('set'); // Removes all declared `pre`s on the hook 'set'\n\n## Tests\nTo run the tests:\n make test\n\n### Contributors\n- [Brian Noguchi](https://github.com/bnoguchi)\n\n### License\nMIT License\n\n---\n### Author\nBrian Noguchi\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/bnoguchi/hooks-js/issues" - }, - "_id": "hooks@0.2.1", - "dist": { - "shasum": "3efcc7e0f4917e4ec300f23a737103e5b920eac5" - }, - "_from": "hooks@0.2.1", - "_resolved": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz" -} diff --git a/node_modules/mongoose/node_modules/mpath/package.json b/node_modules/mongoose/node_modules/mpath/package.json deleted file mode 100644 index 02649ae7f..000000000 --- a/node_modules/mongoose/node_modules/mpath/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "mpath", - "version": "0.1.1", - "description": "{G,S}et object values using MongoDB path notation", - "main": "index.js", - "scripts": { - "test": "make test" - }, - "repository": { - "type": "git", - "url": "git://github.com/aheckmann/mpath.git" - }, - "keywords": [ - "mongodb", - "path", - "get", - "set" - ], - "author": { - "name": "Aaron Heckmann", - "email": "aaron.heckmann+github@gmail.com" - }, - "license": "MIT", - "devDependencies": { - "mocha": "1.6.0" - }, - "readme": "#mpath\n\n{G,S}et javascript object values using MongoDB-like path notation.\n\n###Getting\n\n```js\nvar mpath = require('mpath');\n\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.get('comments.1.title', obj) // 'exciting!'\n```\n\n`mpath.get` supports array property notation as well.\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.get('comments.title', obj) // ['funny', 'exciting!']\n```\n\nArray property and indexing syntax, when used together, are very powerful.\n\n```js\nvar obj = {\n array: [\n { o: { array: [{x: {b: [4,6,8]}}, { y: 10} ] }}\n , { o: { array: [{x: {b: [1,2,3]}}, { x: {z: 10 }}, { x: 'Turkey Day' }] }}\n , { o: { array: [{x: {b: null }}, { x: { b: [null, 1]}}] }}\n , { o: { array: [{x: null }] }}\n , { o: { array: [{y: 3 }] }}\n , { o: { array: [3, 0, null] }}\n , { o: { name: 'ha' }}\n ];\n}\n\nvar found = mpath.get('array.o.array.x.b.1', obj);\n\nconsole.log(found); // prints..\n\n [ [6, undefined]\n , [2, undefined, undefined]\n , [null, 1]\n , [null]\n , [undefined]\n , [undefined, undefined, undefined]\n , undefined\n ]\n\n```\n\n#####Field selection rules:\n\nThe following rules are iteratively applied to each `segment` in the passed `path`. 
For example:\n\n```js\nvar path = 'one.two.14'; // path\n'one' // segment 0\n'two' // segment 1\n14 // segment 2\n```\n\n- 1) when value of the segment parent is not an array, return the value of `parent.segment`\n- 2) when value of the segment parent is an array\n - a) if the segment is an integer, replace the parent array with the value at `parent[segment]`\n - b) if not an integer, keep the array but replace each array `item` with the value returned from calling `get(remainingSegments, item)` or undefined if falsey.\n\n#####Maps\n\n`mpath.get` also accepts an optional `map` argument which receives each individual found value. The value returned from the `map` function will be used in the original found values place.\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.get('comments.title', obj, function (val) {\n return 'funny' == val\n ? 'amusing'\n : val;\n});\n// ['amusing', 'exciting!']\n```\n\n###Setting\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.set('comments.1.title', 'hilarious', obj)\nconsole.log(obj.comments[1].title) // 'hilarious'\n```\n\n`mpath.set` supports the same array property notation as `mpath.get`.\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.set('comments.title', ['hilarious', 'fruity'], obj);\n\nconsole.log(obj); // prints..\n\n { comments: [\n { title: 'hilarious' },\n { title: 'fruity' }\n ]}\n```\n\nArray property and indexing syntax can be used together also when setting.\n\n```js\nvar obj = {\n array: [\n { o: { array: [{x: {b: [4,6,8]}}, { y: 10} ] }}\n , { o: { array: [{x: {b: [1,2,3]}}, { x: {z: 10 }}, { x: 'Turkey Day' }] }}\n , { o: { array: [{x: {b: null }}, { x: { b: [null, 1]}}] }}\n , { o: { array: [{x: null }] }}\n , { o: { array: [{y: 3 }] }}\n , { o: { array: [3, 0, null] }}\n , { o: { name: 'ha' }}\n ]\n}\n\nmpath.set('array.1.o', 'this was changed', obj);\n\nconsole.log(require('util').inspect(obj, false, 1000)); // prints..\n\n{\n array: [\n { o: { array: [{x: {b: [4,6,8]}}, { y: 10} ] }}\n , { o: 'this was changed' }\n , { o: { array: [{x: {b: null }}, { x: { b: [null, 1]}}] }}\n , { o: { array: [{x: null }] }}\n , { o: { array: [{y: 3 }] }}\n , { o: { array: [3, 0, null] }}\n , { o: { name: 'ha' }}\n ];\n}\n\nmpath.set('array.o.array.x', 'this was changed too', obj);\n\nconsole.log(require('util').inspect(obj, false, 1000)); // prints..\n\n{\n array: [\n { o: { array: [{x: 'this was changed too'}, { y: 10, x: 'this was changed too'} ] }}\n , { o: 'this was changed' }\n , { o: { array: [{x: 'this was changed too'}, { x: 'this was changed too'}] }}\n , { o: { array: [{x: 'this was changed too'}] }}\n , { o: { array: [{x: 'this was changed too', y: 3 }] }}\n , { o: { array: [3, 0, null] }}\n , { o: { name: 'ha' }}\n ];\n}\n```\n\n####Setting arrays\n\nBy default, setting a property within an array to another array results in each element of the new array being set to the item in the destination array at the matching index. An example is helpful.\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' 
}\n ]\n}\n\nmpath.set('comments.title', ['hilarious', 'fruity'], obj);\n\nconsole.log(obj); // prints..\n\n { comments: [\n { title: 'hilarious' },\n { title: 'fruity' }\n ]}\n```\n\nIf we do not desire this destructuring-like assignment behavior we may instead specify the `$` operator in the path being set to force the array to be copied directly.\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.set('comments.$.title', ['hilarious', 'fruity'], obj);\n\nconsole.log(obj); // prints..\n\n { comments: [\n { title: ['hilarious', 'fruity'] },\n { title: ['hilarious', 'fruity'] }\n ]}\n```\n\n####Field assignment rules\n\nThe rules utilized mirror those used on `mpath.get`, meaning we can take values returned from `mpath.get`, update them, and reassign them using `mpath.set`. Note that setting nested arrays of arrays can get unweildy quickly. Check out the [tests](https://github.com/aheckmann/mpath/blob/master/test/index.js) for more extreme examples.\n\n#####Maps\n\n`mpath.set` also accepts an optional `map` argument which receives each individual value being set. The value returned from the `map` function will be used in the original values place.\n\n```js\nvar obj = {\n comments: [\n { title: 'funny' },\n { title: 'exciting!' }\n ]\n}\n\nmpath.set('comments.title', ['hilarious', 'fruity'], obj, function (val) {\n return val.length;\n});\n\nconsole.log(obj); // prints..\n\n { comments: [\n { title: 9 },\n { title: 6 }\n ]}\n```\n\n### Custom object types\n\nSometimes you may want to enact the same functionality on custom object types that store all their real data internally, say for an ODM type object. No fear, `mpath` has you covered. Simply pass the name of the property being used to store the internal data and it will be traversed instead:\n\n```js\nvar mpath = require('mpath');\n\nvar obj = {\n comments: [\n { title: 'exciting!', _doc: { title: 'great!' 
}}\n ]\n}\n\nmpath.get('comments.0.title', obj, '_doc') // 'great!'\nmpath.set('comments.0.title', 'nov 3rd', obj, '_doc')\nmpath.get('comments.0.title', obj, '_doc') // 'nov 3rd'\nmpath.get('comments.0.title', obj) // 'exciting'\n```\n\nWhen used with a `map`, the `map` argument comes last.\n\n```js\nmpath.get(path, obj, '_doc', map);\nmpath.set(path, val, obj, '_doc', map);\n```\n\n[LICENSE](https://github.com/aheckmann/mpath/blob/master/LICENSE)\n\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/aheckmann/mpath/issues" - }, - "homepage": "https://github.com/aheckmann/mpath", - "_id": "mpath@0.1.1", - "dist": { - "shasum": "697ddfc722affc287acce8a6eb3202a6bc9e7c28" - }, - "_from": "mpath@0.1.1", - "_resolved": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz" -} diff --git a/node_modules/mongoose/node_modules/mpromise/package.json b/node_modules/mongoose/node_modules/mpromise/package.json deleted file mode 100644 index 8286951b2..000000000 --- a/node_modules/mongoose/node_modules/mpromise/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "mpromise", - "version": "0.4.3", - "description": "Promises A+ conformant implementation", - "main": "index.js", - "scripts": { - "test": "node node_modules/mocha/bin/_mocha" - }, - "devDependencies": { - "longjohn": "~0.2.1", - "promises-aplus-tests": "~2.0.2", - "mocha": "~1.13.0" - }, - "repository": { - "type": "git", - "url": "git://github.com/aheckmann/mpromise" - }, - "keywords": [ - "promise", - "mongoose", - "aplus", - "a+", - "plus" - ], - "author": { - "name": "Aaron Heckmann", - "email": "aaron.heckmann+github@gmail.com" - }, - "license": "MIT", - "readme": "#mpromise\n==========\n\n[![Build Status](https://travis-ci.org/aheckmann/mpromise.png)](https://travis-ci.org/aheckmann/mpromise)\n\nA [promises/A+](https://github.com/promises-aplus/promises-spec) conformant implementation, written for [mongoose](http://mongoosejs.com).\n\n## installation\n\n```\n$ npm install mpromise\n```\n\n## docs\n\nAn `mpromise` can be in any of three states, pending, fulfilled (success), or rejected (error). Once it is either fulfilled or rejected it's state can no longer be changed.\n\nThe exports object is the Promise constructor.\n\n```js\nvar Promise = require('mpromise');\n```\n\nThe constructor accepts an optional function which is executed when the promise is first resolved (either fulfilled or rejected).\n\n```js\nvar promise = new Promise(fn);\n```\n\nThis is the same as passing the `fn` to `onResolve` directly.\n\n```js\nvar promise = new Promise;\npromise.onResolve(function (err, args..) {\n ...\n});\n```\n\n### Methods\n\n####fulfill\n\nFulfilling a promise with values:\n\n```js\nvar promise = new Promise;\npromise.fulfill(args...);\n```\n\nIf the promise has already been fulfilled or rejected, no action is taken.\n\n####reject\n\nRejecting a promise with a reason:\n\n```js\nvar promise = new Promise;\npromise.reject(reason);\n```\n\nIf the promise has already been fulfilled or rejected, no action is taken.\n\n####resolve\n\nNode.js callback style promise resolution `(err, args...)`:\n\n```js\nvar promise = new Promise;\npromise.resolve([reason], [arg1, arg2, ...]);\n```\n\nIf the promise has already been fulfilled or rejected, no action is taken.\n\n####onFulfill\n\nTo register a function for execution when the promise is fulfilled, pass it to `onFulfill`. 
When executed it will receive the arguments passed to `fulfill()`.\n\n```js\nvar promise = new Promise;\npromise.onFulfill(function (a, b) {\n assert.equal(3, a + b);\n});\npromise.fulfill(1, 2);\n```\n\nThe function will only be called once when the promise is fulfilled, never when rejected.\n\nRegistering a function with `onFulfill` after the promise has already been fulfilled results in the immediate execution of the function with the original arguments used to fulfill the promise.\n\n```js\nvar promise = new Promise;\npromise.fulfill(\" :D \");\npromise.onFulfill(function (arg) {\n console.log(arg); // logs \" :D \"\n})\n```\n\n####onReject\n\nTo register a function for execution when the promise is rejected, pass it to `onReject`. When executed it will receive the argument passed to `reject()`.\n\n```js\nvar promise = new Promise;\npromise.onReject(function (reason) {\n assert.equal('sad', reason);\n});\npromise.reject('sad');\n```\n\nThe function will only be called once when the promise is rejected, never when fulfilled.\n\nRegistering a function with `onReject` after the promise has already been rejected results in the immediate execution of the function with the original argument used to reject the promise.\n\n```js\nvar promise = new Promise;\npromise.reject(\" :( \");\npromise.onReject(function (reason) {\n console.log(reason); // logs \" :( \"\n})\n```\n\n####onResolve\n\nAllows registration of node.js style callbacks `(err, args..)` to handle either promise resolution type (fulfill or reject).\n\n```js\n// fulfillment\nvar promise = new Promise;\npromise.onResolve(function (err, a, b) {\n console.log(a + b); // logs 3\n});\npromise.fulfill(1, 2);\n\n// rejection\nvar promise = new Promise;\npromise.onResolve(function (err) {\n if (err) {\n console.log(err.message); // logs \"failed\"\n }\n});\npromise.reject(new Error('failed'));\n```\n\n####then\n\nCreates a new promise and returns it. If `onFulfill` or `onReject` are passed, they are added as SUCCESS/ERROR callbacks to this promise after the nextTick.\n\nConforms to [promises/A+](https://github.com/promises-aplus/promises-spec) specification and passes its [tests](https://github.com/promises-aplus/promises-tests).\n\n```js\n// promise.then(onFulfill, onReject);\n\nvar p = new Promise;\n\np.then(function (arg) {\n return arg + 1;\n}).then(function (arg) {\n throw new Error(arg + ' is an error!');\n}).then(null, function (err) {\n assert.ok(err instanceof Error);\n assert.equal('2 is an error', err.message);\n});\np.fullfill(1);\n```\n\n####end\n\nSignifies that this promise was the last in a chain of `then()s`: if a handler passed to the call to `then` which produced this promise throws, the exception be rethrown.\nYou can pass an OnReject handler to `end` so that exceptions will be handled (like a final catch clause);\nThis method returns it's promise for easy use with `return`.\n\n```js\nvar p = new Promise;\np.then(function(){ throw new Error('shucks') });\nsetTimeout(function () {\n p.fulfill();\n // error was caught and swallowed by the promise returned from\n // p.then(). 
we either have to always register handlers on\n // the returned promises or we can do the following...\n}, 10);\n\n// this time we use .end() which prevents catching thrown errors\nvar p = new Promise;\nsetTimeout(function () {\n p.fulfill(); // throws \"shucks\"\n}, 10);\nreturn p.then(function(){ throw new Error('shucks') }).end(); // <--\n```\n\n\n### chain\n\nAllows direct promise to promise chaining (especially useful by a outside aggregating function). It doesn't use the asynchronous `resolve` algorithm and so excepts only another Promise as it's argument.\n\n```js\nfunction makeMeAPromise(i) {\n var p = new Promise;\n p.fulfill(i);\n return p;\n}\n\nvar returnPromise = initialPromise = new Promise;\nfor (i=0; i<10; ++i)\n returnPromise = returnPromise.chain(makeMeAPromise(i));\n\ninitialPromise.fulfill();\nreturn returnPromise;\n```\n\n###Event names\n\nIf you'd like to alter this implementations event names used to signify success and failure you may do so by setting `Promise.SUCCESS` or `Promise.FAILURE` respectively.\n\n```js\nPromise.SUCCESS = 'complete';\nPromise.FAILURE = 'err';\n```\n\n###Luke, use the Source\nFor more ideas read the [source](https://github.com/aheckmann/mpromise/blob/master/lib), [tests](https://github.com/aheckmann/mpromise/blob/master/test), or the [mongoose implementation](https://github.com/LearnBoost/mongoose/blob/3.6x/lib/promise.js).\n\n## license\n\n[MIT](https://github.com/aheckmann/mpromise/blob/master/LICENSE)\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/aheckmann/mpromise/issues" - }, - "homepage": "https://github.com/aheckmann/mpromise", - "_id": "mpromise@0.4.3", - "dist": { - "shasum": "a80553d1136d86e775e4624192cdebfaeec57936" - }, - "_from": "mpromise@0.4.3", - "_resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.4.3.tgz" -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/package.json b/node_modules/mongoose/node_modules/mquery/node_modules/debug/package.json deleted file mode 100644 index da7428cf8..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/debug/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "debug", - "version": "0.7.0", - "description": "small debugging utility", - "keywords": [ - "debug", - "log", - "debugger" - ], - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca" - }, - "dependencies": {}, - "devDependencies": { - "mocha": "*" - }, - "main": "index", - "browserify": "debug.component.js", - "engines": { - "node": "*" - }, - "component": { - "scripts": { - "debug": "debug.component.js" - } - }, - "readme": "\n# debug\n\n tiny node.js debugging utility.\n\n## Installation\n\n```\n$ npm install debug\n```\n\n## Example\n\n This module is modelled after node core's debugging technique, allowing you to enable one or more topic-specific debugging functions, for example core does the following within many modules:\n\n```js\nvar debug;\nif (process.env.NODE_DEBUG && /cluster/.test(process.env.NODE_DEBUG)) {\n debug = function(x) {\n var prefix = process.pid + ',' +\n (process.env.NODE_WORKER_ID ? 'Worker' : 'Master');\n console.error(prefix, x);\n };\n} else {\n debug = function() { };\n}\n```\n\n This concept is extremely simple but it works well. With `debug` you simply invoke the exported function to generate your debug function, passing it a name which will determine if a noop function is returned, or a decorated `console.error`, so all of the `console` format string goodies you're used to work fine. 
A unique color is selected per-function for visibility.\n \nExample _app.js_:\n\n```js\nvar debug = require('debug')('http')\n , http = require('http')\n , name = 'My App';\n\n// fake app\n\ndebug('booting %s', name);\n\nhttp.createServer(function(req, res){\n debug(req.method + ' ' + req.url);\n res.end('hello\\n');\n}).listen(3000, function(){\n debug('listening');\n});\n\n// fake worker of some kind\n\nrequire('./worker');\n```\n\nExample _worker.js_:\n\n```js\nvar debug = require('debug')('worker');\n\nsetInterval(function(){\n debug('doing some work');\n}, 1000);\n```\n\n The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples:\n\n ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png)\n\n ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png)\n\n## Millisecond diff\n\n When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the \"+NNNms\" will show you how much time was spent between calls.\n\n ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png)\n\n When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below:\n \n ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png)\n\n## Conventions\n\n If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use \":\" to separate features. For example \"bodyParser\" from Connect would then be \"connect:bodyParser\". \n\n## Wildcards\n\n The \"*\" character may be used as a wildcard. Suppose for example your library has debuggers named \"connect:bodyParser\", \"connect:compress\", \"connect:session\", instead of listing all three with `DEBUG=connect:bodyParser,connect.compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`.\n\n You can also exclude specific debuggers by prefixing them with a \"-\" character. For example, `DEBUG=* -connect:*` would include all debuggers except those starting with \"connect:\".\n\n## Browser support\n\n Debug works in the browser as well, currently persisted by `localStorage`. For example if you have `worker:a` and `worker:b` as shown below, and wish to debug both type `debug.enable('worker:*')` in the console and refresh the page, this will remain until you disable with `debug.disable()`. 
\n\n```js\na = debug('worker:a');\nb = debug('worker:b');\n\nsetInterval(function(){\n a('doing some work');\n}, 1000);\n\nsetInterval(function(){\n a('doing some work');\n}, 1200);\n```\n\n## License \n\n(The MIT License)\n\nCopyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", - "readmeFilename": "Readme.md", - "_id": "debug@0.7.0", - "dist": { - "shasum": "b16ab3bc97e49bc1f64dc14d80fe6654e26e0198" - }, - "_from": "debug@0.7.0", - "_resolved": "https://registry.npmjs.org/debug/-/debug-0.7.0.tgz" -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d deleted file mode 100644 index 866c155bc..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/bson.node.d +++ /dev/null @@ -1 +0,0 @@ -cmd_Release/bson.node := rm -rf "Release/bson.node" && cp -af "Release/obj.target/bson.node" "Release/bson.node" diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d deleted file mode 100644 index a7317de71..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson.node.d +++ /dev/null @@ -1 +0,0 @@ -cmd_Release/obj.target/bson.node := flock ./Release/linker.lock g++ -shared -pthread -rdynamic -m32 -Wl,-soname=bson.node -o Release/obj.target/bson.node -Wl,--start-group Release/obj.target/bson/ext/bson.o -Wl,--end-group diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d deleted file mode 100644 index 2c2850626..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d +++ /dev/null @@ -1,28 +0,0 @@ -cmd_Release/obj.target/bson/ext/bson.o := g++ '-D_LARGEFILE_SOURCE' '-D_FILE_OFFSET_BITS=64' '-DBUILDING_NODE_EXTENSION' -I/home/vagrant/.node-gyp/0.10.24/src 
-I/home/vagrant/.node-gyp/0.10.24/deps/uv/include -I/home/vagrant/.node-gyp/0.10.24/deps/v8/include -Wall -Wextra -Wno-unused-parameter -pthread -m32 -O2 -fno-strict-aliasing -fno-tree-vrp -fno-omit-frame-pointer -fno-rtti -MMD -MF ./Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw -c -o Release/obj.target/bson/ext/bson.o ../ext/bson.cc -Release/obj.target/bson/ext/bson.o: ../ext/bson.cc \ - /home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8.h \ - /home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8stdint.h \ - /home/vagrant/.node-gyp/0.10.24/src/node.h \ - /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv.h \ - /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-unix.h \ - /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/ngx-queue.h \ - /home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-linux.h \ - /home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h \ - /home/vagrant/.node-gyp/0.10.24/src/node.h \ - /home/vagrant/.node-gyp/0.10.24/src/node_version.h \ - /home/vagrant/.node-gyp/0.10.24/src/node_buffer.h ../ext/bson.h \ - /home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h -../ext/bson.cc: -/home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8.h: -/home/vagrant/.node-gyp/0.10.24/deps/v8/include/v8stdint.h: -/home/vagrant/.node-gyp/0.10.24/src/node.h: -/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv.h: -/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-unix.h: -/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/ngx-queue.h: -/home/vagrant/.node-gyp/0.10.24/deps/uv/include/uv-private/uv-linux.h: -/home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h: -/home/vagrant/.node-gyp/0.10.24/src/node.h: -/home/vagrant/.node-gyp/0.10.24/src/node_version.h: -/home/vagrant/.node-gyp/0.10.24/src/node_buffer.h: -../ext/bson.h: -/home/vagrant/.node-gyp/0.10.24/src/node_object_wrap.h: diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/bson.node b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/bson.node deleted file mode 100644 index ec10a654ccec8b093e2982c214a9ad4641f77238..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 57156 zcmeIb3wTu3)jobEnZR(7iHM4q>Zq|oMaYBz2~rIt69@!|BtcYg7?Obml1!XS2nYyH zf=tI?s#fmKh)oI=f^4b(-^hff{A(5+z{QZw<+Wr^iIRh0gz&SwWX915Q|0wc#9tX{n`Brs~0Kb0b zA&sY=SM@=#e#gCddow5|M1tZF7SmfMxGZZx~mPs`Hh-yb*u-qetnfzV%eIjJy)i6UvS#I zlHr=AOsgDFGR>+@zi@okoT;{#2W-`*Q5Kh$HK_1ZZ9`;$b}LA6u+9dYD{wa9Y{u!s zc>~TM&NiGpR^eQYb1eyV+yvZ!^Jbi#I5*Kmx&G2ES;J2M(0AR(@1A|IXK!)mU27bNKi#n7!qb<{UHfT8*BPhU9{TI= z^Va8_{-M({_;1?|EqSo(yr1|AUwh%JdtcgmFw(T^?!{|Id}zD)^S-tFUf$&FTL10O zA9{LA#S?q~^41Lt&VBw5x38ISPUVo7zI*B;pWRe_*Qy-#gVJl)8R>WZo~%?-+RRA4l4QFTS>?b=Teh==k$p-`_Ce!-qF*seZ2OFT;8l z#P2xs&GUbYZYCW&IH2m_ zCHw14>2G67e@B|yWBuVoaUuWbtW+J$KbS(tI4RY^{9IE9$@*uTo-5^fDTO~|;%E7a z6nS^2w6~QLUmfIsI;H-bhv~Vpy^m7#HO@4?th&UW%>Uz*_70o)+1{He^?f~so{};i zhg0f5!b;S^%Y!NQ^>K>5oSstuAd|i*s)Np29jwoqQvN&>KlATMsqgI+d#E?bPt&%W z#)IWwG}%AfZ^Ee`%zr*bU)QIM&(f6f(NgH$1NGYE`0YxOzc9sq_NMf|CQ)42?*vnS zDDP7#bZ3e_W~Jm`o}$mEQs&E#Q|8;LDgB+DBH!Oq^ldZgllohc(*A8sR0l8L*9jo$ ziWK{4H|d}K{b5S|6PTzDUVb-4z7Z+)-JjB*r789QG{wHFQ`&nmMSloejDzieW>A_& z`dkxzA+86ejL!!t^=~othx(Xfl8^M(6#3S|zWQ96R))Oc+7Rt(jAt$uYOdcGg1!=k zwl34OCt!EX-^K9&^AyG30e;7gh&vSjWw76^OEv8uDt`X9+isoGf@=k#L8H&vA{h5mXFq@0O#IUV2RgCt zq2HtX*G43M$A?iE226d?NAEKXT{pC;|jAw(&-vs;VuF@aJ5Zl_{Qd*umW%#z zd{}-b>}xUP$B+v8x2SJ@c|t$qU=QnOBd$a_*8iInc{f2neI8BQg?5>LKgv5$ex8zN 
z81%7znx=t8}zocf*ya+?bh6lEsa6Vy%d0~x^@pQJxz_Py_$Pwn|q#j zSz{pR^_Mnz0s(J;rQ6%xtzLh?N3P&1cU!*ZzIIMQTanW_!_!jV-E>{hn55!PQ>Ric(Kg(-KeJ3YX}8eif8LU9|)uzte39 zRHZATV1AKvQbk~TlW&QqX^z+9FY|hW4X&yxbhoIn1zLtmC(2etuhZHcKhL|=&?~Ad zE^R_Jl;z9RQdS`9sF^<}tpQ!xN{WIB)!ty0Psmo$QlUgFZ}c|RyBd@FHNLDd(CQ0# zW#_((G!trD{K>s?7U)VkNzY((P(smc&kFAeIIG&K%G;n~XYvRo2B><1TkkiAXu@od ze?pw)~2T7oC)%(UvcbCf#Mm06ZA^jzO0HB~5>WtwE3I4!hvsn=gv<_UU)5hd*3 zWIr^@S?H=XsG^F1vJl;@)L4z5G~FAd*zMl>DnF*6KiKFExZ0&zP4)Xdt3*4Zm>77~ z{KQIc%QDy@t+-zSm?zsU9RG#dQI(3;ocU}S6WZ%(cIIF0Zwz{2OXCY%^JXPCn(x-l z-^D!4B1|PDruqWC%aG(mZXlsGXTE4q`iL}{3ML4dq{vmU#zyE+wJAlMQ0c4lG-VKm zO(^l4`L)5NMdp5+GK)nEWN=m0HC8nWFPNm4@q$C@xT5~QrkX^Di)wtT1ERMkNn~GL zRl!CfL_x7|t5chr(8Hk7ZQVC7hsR#6HN(^S>fkHe0biuMP_;9Hmj2a#b}xv#ZLWB(q?`L&+-8vq?&IqHCgG zG~s@PY~%`>a9s({&d!-#ogRY-8$f-9b3#o`vX9iJlvXeq8lDqsUgGtOw0c%YJ{W+j zy;Y@<``qDx#?@g+wu*Q#wMQ>?Bxu+@6ufhuoO$ryPqJkJO7%H0} zl0^%8jHUMotGH`AjfY38d7K(6<5_XW(npkA81T9Z)(A3T32ABH1Ug|cnvJ}_&rulq` zJ6mLTnKMbHD5;?a;rf^0=Ap%nWu>k`r68ixY0ONHRDm;Pyx<5CCS)G9gcJyaY8yoi zA#|R|K!7bhNlLoYDfvwy00d9EXDOQJSy|eIAogStAC?-{UNc|xfa*b23>}MP_=}q- z1nOQt#_gnaiTNsnj1&_UeG+}qBQ!SJvQjuN(X#m_OZhHFM1p~(4R^`VaE_;0d6$H; zoCREV>VmEtLU2Cj$p!Z%@4Q4q=$JQHtuM98siy~ThS4Tl41%wKzPcFh&XBE;w^8P% z=_XVYF5G3PC@?W2Sh=+6BHo?&eL-IkE7@#su)$Yf!J$Zr^qp?!M7{kQZ*yxC95HSb zU9PqoH^Q5$ayF4zK1_2##-OHg(Pv38m8qtL%&f?5u17dM5#G9ss~eZKc!D86 zg;K_(99uD#;_6k+OMFe{JLmMqh4YkZFghsgpGKgkj8FH$3IC~US>B7dI3E3ZWN8ry zs2hVa^h$GHiQyAtC8l|PoewdEKU5d=`Q1T%6jXOJ^k|~)kah75%W)P;>(Da_oak7R z-;>T*Uj#E$UVsu}M?*}C&Qr(QahZ)tKR#J(N_=cy`&CFDKwEm}GEW z-56|;_k&Yg5IEvKE^|5BcbYjnotU7yjY~~Ag(yb#{tBm4=tcQgXQ7cL{blNOE=X7} zBXD!vk68W-S1$c+RaDWE)7SYz2wKvz=mup_r_U~B>cT;mCM-PRY)_`CgaPP*lp6MgLrNM6 z8G%YObW+s0D_iu)iLI(Tl0^TV1xjls?W7B3xn^=?Y``~mq6TtVU7b5X%1Npf_tUr)B%M#XPV%|E z_1I#;L|qaH$h}?7jbKX%FqYew+Vs*=_jr^kU9e#4vuFJP*5{U-Q za(bP-<}pCCX=a%%+W(DzkK>sE_w5}xtG>DloANg8Q>5{*K)$UiKOgk39$$e@4A3li zu*EXL(YIrVKU*8h2VFQU(84biKj{&Tjd)@(P#dZ9pVf#bK{?umip~}JR&A`J1Lre8 zODj_K&BOykK0>%wp+{j*VTZz~!uu3Hrtl{U_bA-2@OKJ7RXCtTwlhrOc08bAyL}2j zQaGkeriT^YsqiJ2OwXS#@%MPf$MVg1_Cfs19Eof3Y>M>it0b<$gBa2uDXgrP^!F7$ zsqi_4y$b(R;d^>Io>{S;cNLDQm2|bj2Ne1j$n=L5=3XP|T7_#A-m35pgMD9Q24CEBMKeYOTHR~n-uO)cvRs9 zZpk-KVOZgIg&!z9W3l9$uCQI{NK!Qb~VZVVT0K6nYdkC=4u<{I@7{ zG^lzN-k@-^!XGKzqwpUJcj3XOP1CelDqo6h5PHx5C#HzN7H4!oMnX z;u$FGnW}Jx!ttwQda1&h3a?hUOks<{8x=+s-mXx)QPz{I@Jxl%6<)3IdWAFBO8$kN z5=V4NT&D1-LcUW;xotg)PvN-=k@Y`vEWHw5^Bg?8R`|Tag9=}SpJ4v`3h%l`(mNDZ zz)qRptneu8hx8S&3*tu@U*h8!AL6eR{zKtxj1SZ2Lchd-!aEh7d!tORQ}}Png=hb# zmtf5}AMZB+v#_!_NC)xW80HCaQiH@Yg&u|L6yB}yS%tq=IBY8FgU|lKT!rw9qz_;| z6Q4r;L_B5|xL4stc$UrdHL!c)81@f%G3%SGX&=Jh6XWpr#E;>(i8a(KaPZQuP5M*n2Y7_z0BoY& z0WC`;uBP3A9>sA0Vy{r(28FjPd>#Ie`Jd4~fzQFO5l_8A;+qOr(_WE&1IG_&p?v~F zlnXkrP!3>_asYqkk$5xZ06m-S0q<3qMY{lf7V8Jz%=&?YSTFEC*6Y-?w~2+YkLeQs zStapIw9WLnv=?B#!YvBNQ9nqxVx1;>6}BsU_iCAbOyN-47x?lNUa4>l`-Ajp3a8dd z`h4mO=`Sg~q(#zG6xJwwRN+dj1LXgf!iN>UsPIjNe^hvNP}Q&SQLGazzgyww3Qwip zke;uwRN*xWM@&QhS2WE(M`E|a#}qE7o{-MYC(WUTr3hg(` z^nD8X!Yj)=6$TXkv_q!-~0~0}@|YC-Gy-jr^-9FYtcK1>Ap&#DB7V&}-Q)a1rYV4(pa!&H86+ z+U-R6uHCdl`Y^Twe1_?lLVrZq?5}~ANwClVka&;6?nfkT`;o-=$PeFoS}AaVrVUvm z@m0zV+If>i_stS(C^ynSq};$8SnhP#Da!#r*eLOwO%iufKSjdzoWfJdD_%iE99Xr-ayuKWG zCDudYEKLitUazK|GgD%n!fTPo^exN}YuZH8z(Z9MFX6lgeKqGZ@KN$jfgYI;e4Y8g zW3>_okPq}i+CA`K*hU z+BxvYv~S=6g%fCB%%`09XxhvfK*+L0;b#hmSIG1`6_!&U;Jcmj1MgifF}p=#H_Kg! 
zID`Da*Et@*HKfnQdvEk7z}=*==KO*9E$E%~=HvZUjwA3T(x>8nlj93KNE$NlV?OZj zq*oy(<2Zax)4os4)wG`zF_yn4PJn-A`O%n9M96qA@td$4)&qQwbRO0b_6InX={I10 z(te1fo3S3TAEzOnX_PpbG-R;Qe*tghI0E0}I0Aph@dEyRlSB{eyIs@XB96y0{}~f$J20 zlX?NYmU;nxlk#Q5|5F~|YitL2CHa9*Fn=oMC-Z?HGaq;x^MS83A38dX{R0+o9D&QP z0uDudOI!$hV>)mw^#$C-dVnh^KkyUsjnlLu)^jn|VU7!MoWkoB#<&h6eKKr=_#FES zyqo<6-q$AaNrh{wL0jS9sAu3eNY}w{k_HZAy{L1B!e6m|oENcv;5S(hu$cKM+n_M0 z@NeWtdR$={%Ym*^xP#?EA6Mw0|G6CRJ1gu}Xyte!J*Mz3rVoT)St9Wn)(83qt_Q%z zjS^3v4;+Q{U*Rl;gE42A{!iL5@R^9jA95TpcOK_B08ghq0UrrU9DA)MV!I93fyViy z!apb+fw5w`OJSSBA1d6haL^*jH(6ne!tX2mPlbaK>#@Eo6#5lDr0{Kp!`zbZN`o*|tMGP(FDdMxUcvV@*Z^_0!Y38}RbfHB%zs|tnO;eI6h5c$?+Rm_ z$Efe89^jqO6Wf8Ud`ID>YbAa8%@V)Xq3BMDHMdHfaht?zSgu&p>WC=w3~>91;9*BXhRk%lkLZ2($u z-$sP);|d=kALuJdU#4l5KnvE0g{0BODuv%x_>{svg^fT9WZX#nF7D%rK27T(-h%ZS zm^B#nGy);Z$R4BvXVOoM#ry5xC04dd+(f?(`ooaKuhSoaj*=fd$CwWc(Jp{1IsU+I z$eeWv=oX3l6yC}Dk)8`4;#UI_r_e8fKBCaZ_COaayhdSIVb(gCKbrD@uYrCLGQCK7 zfL4wV@ci2(hMAAH>ewF6e|X_O}l0#@H|bsk#+@qfp!7Bly(68k;2i; zFTpya@aNBy=N0;pZ^8BB$S0nUllVP_+gZL6dSrQEJ<^E>6~>qkU;p}zz_Sr2 zah?G$ru@M93SXk#T#LAfI0o@0{YEX~y-taDlWs%393S9F&P(6|g)6Bq(7z?$639rs z0`t}Z9q_ZaNc_)RC3Z6XEchq#0gsYK9fLSOM#2v=9e4q0tVPd~23|?phVOJp1K%Y* z58pJ<4*<8U2V(BOz^F3AZI{Gt z3eVUq>EDwN`9EU6Ap6u!zzZ~O0@}<1?<|EaMChc8^+V?6EDyY-8+b4DcQPS)OeQvVhhD==pAIUfy@m&_9zn zd(bCxI+HnXO6M$r=Ezy8I7fa{)cLJ+&h?6Oem|Vu>QM4p^TzZjxmdpq1klPv$c#Fq z%)4`V$z$(MSpmw=uRio+q-@yv{7q2bk=*q!j3Gy8Q8dhcLdV0R;SorQSo03;`*%DZ z4d;XEdg>UiyY4uO|40Al%m2;_|JhP}G~noZnx(@%mWW`BZwZq_mMK2!|FAdA>Mh}2 zxz}C4XeZjQJ~S2y8+KkUnjaCp<2d*`-w2)_4Ih{759tzVkr8?EzVNOQ*Dcx^JvUEw zK=gk`WYMv%Mf)S6<6WUQBWwQ|*&V6vi=3M$#wl9cN0p$ac%(2dQj=$u$*woUN32YN zp3Esz5-DU*wM33b7afa+jz>}ZkwrTry+%f%Y^GcGTaHI$(ecR6NC=xPk=G)Xc{a96 z?NOhW@zF)cqajhv(r)%Yd?YWr$kzEz`|!)pT01N{{N+Dnjea|l8(s9zy*s;Vk40QZ z(Q_r@(I^s_1=;rQ6y@imf~u%17kx)HXcE1Rx`yH+y>sX*QoBM2Og)BnI0tAjLpeF9 zEz*%-Lg;9zwQKhAt~Sm7%o*X{tWCKX-{RLIrPk1&yS&FakX_rq%0lQ5<#9w^wysdF z-e**ms}BOuipi=)c0x#K5#kON(!fwTSH*RNQ&31jc2=xc)vX%p+JV(iv==@y17jXt zI~3#Zy!7m-HW3}Dm+I_^^oA~qx(<5B=a9qRc?bSa8d5oaT_xv-4;&r6t7~E2 zMd3djMeBxc*c_!2mb5U>nyB57Q5j3ui1EdJq2l$I=LKgmkM^zGOJ|S0BZw-(y(N)_ zc{XOhjOmY25gix*ner;(A@n>`a;qs>Szgym6j0h9v>%;4G+LV*0gi}NStC`puA0H| zNY&8wleJLR-kof>TY8@o6cf#M1;5Jj7`_p-pITdVQEt>0862Hqi_El&|7~5f;@KpJ zS{B%%^M+ntHFV_=R(ZCZm#lVsO0}7q>eh!5?OjcSIXAj!XmqVD8n8wFN;&LL4-UU# ziB#J<_J;m8`mf?v@mh0A0;n@ zy_zkc(y3ij`=$(9d4;p*@Tt0a-q08M+xpK3tsIRe>@V)^vaatco7xvqMMA%0%U9dN zyUwNjJ5ln_)YE$~1-l&Q8Trk}N|Fqc}n!A1whd1NSNKfDWk z487M8szzD1ZXZJaDbRP<+P(-p<{@aR+fdcgh%MZcReUt`Cm2y=O`a8E+2|JZsf6St z#p@k5&Bq0_yhX?CPrF87PEu^l&(P5DXM^n>=VWWzx~syU<=Q*e24qX*Jqs#SXblw;UVfNsWY={yYDN5wnJLVz)vTuGu zR5;Myv4`ZX?+emq@7RL+!k-PYcPzjKwMB*hEk)ts&rY#-*o9yl@PGKTVfK!Hhzol+ z?K0FGF1J~`*1oyPwM$u~utxNCSG4v``_ny<+FjH=Hbt*_RhKR7pz&YQ@I(GJ$$~depId&7`8*b2zetg2C;OB zb&JNZUHIu}xecA@s(^b~4895Np{`4DSydH+(v5m;kJY8cJ^rm(f zR2wa^QYL8|Gaz+1O`5A+v z?Hs(=OlT+bY>K@`!KACnk9B{Z-s%a|?ugkSuh3_7hMG=FrL}t^v-kb3c~5#57fDUn zpJqU1sG{SI;F-xim;gR8uac9`Lo%oD?7nDLbezWP2*b}MMn}ca47debxb<@xc ze=XL7Fi0fS`NbSL>>s?eu4;-yMAXYCuycI1K_xb-Ly7mAfC_9&+ zqQn|cvEEIIF7!Ym(Vy^VcsU!uVm+y zDtYSxRR--(*ZwN9c3ce&oB zeM#TSA(TR#7X1e6QtovPn&w@PKCH;yBywp&NEMl2l4YgzY88QJr(`APLI;LeVi~eU zvQk42jzMMW80e1Au?4dhVmV`Z2BL3+Tij$o#w}p>e))nJ!VDmL_+|l{g=v0b+79MOV`?636n~#(W?0R)dL@~ch*p-1w%gBX$j^=OXKU? z2X@*!*?rC|u?8i2i_In{EfftHT#??CeEtujD@XqjD`fqso0xlLa66$xT8GO{4q+JhS@xgggj6? zD?uGbC6S%6N7$g)H!*1iLj_^k`ZM5`1B>qWaE1#ZP>*R8SyHJFt_(cPgTjQp85$Im zT}@io4(emkF1GF9SX2(J2Nd;VFN|DhFsh5|n| z`jf(6jLIc{xix&qWZ1!_;dM64x(0L;O*6}6#ZpZ4Tikn7e?z`29G7w+A`TFpHr&`G zsE$)3_6IDHSnizf#9ZhA`8Qj~>ma=pL? 
z-DvR8_YcS896Ehp@#`2=c17(g9;EWXCwNY_`*-L+P#Nz2f;EwyCUeu*VB~~8)JGK? zQLjXfANti17$=gMJ?m!DvJH0riChPP#+Rjbsd^W zvNjP}Jb#?^y#P%fehh3`>n;#%hI_$cIDx{9+5)yMWVVqc1=@6IyZJB zQVXWjB~$p2(Hmr0BCcoco8OnN%%l%W5OHm@Z+=|QGK4_ZfQV~*uj_HFN`GcHmJ(wf z8AzdPcj)Upr4ydrw2tJlOzJukQtL=j-Y}-~r7H2Usk@0NJu5u7~ZL z4@mb&O`A31dKh!_Qr9-#kA?QD^@Cn9+qDf!k^zS93ZKO`cNx6oe6m@3pY)i z+D4EgC%agKwp+MJ!TlO*TzIGD@vc1K+YE_J=jCWN0bdxb9C2O#)CIbjaiV2fnM z@-c@nZy*+(0eWIy7vApAkDjLcQM4i|Q~tP2_s1#er}mTnTlz$CX%a^i+7j(wJJ#fyY-$oqfnAIkmsMc9u&w!2hV*zQufwVqAbT`KMuc9+VH?$Qyo z!QG{JhDkVAfj^k^hdbm2t~ME0w;EU7#?=<%>MrBz9^-1Oer1Y1Ml$x$cO6{E zFeBjjI;!z#%^0oHg=Sm7Fk=C})tV@<*j(U}j0N^4wNP&^FfU_)?xX?@<^q>xEO2#F zfhKc-i5Ux=msFtDT;R%#1wOEvI~_C^D9>2n(WC9Y+Fb4|uF8cT*jZV%(Y2nwx*O>m9flu<}#&)=0?_K zEbsxs6r(exMdkvxWGukHPoF3-#av)>#sYz)0wv}GcVsLuIjKOIxxjZa7O*50m|-rk zHDiH2&}E_zmF5CJ%vgXo+lc~I<^oS-C=k3h!CYfzel`>HR}#z%^k>SrZ|jLYhNZ0= zkM&B8WW+0YVD<>(7UcW6+yTYCB~VlJZpJ*IH!G}Fc&@@fAQE8y4;8LZI8Nc;u=+Fq z8HG10yi#GdLIxJ19fh?D&sF#bDSzw{g*PZHQW*afOZN%hCy@JzU9SDQg0Q<8UTw1k zFJ*wU>05|RKKU(giV_S-H$`$^VVdbCD~SD=q0y4W&IT#m0_?;7p&oE^O^d^#@HFxP z47&_=cIqNHQ?symB?_HQ0Qoapq_r$Si*gJNh`ZJyz}drbhUps85P0SUS5PkY zm+`C<{mw-z?Eg@re&dYxWy`A7;3!eE+L1<9tR0G_*51JMFu$|Gv}Z8`%xP)wyG+5B zMAsrd8xr;FtpkM#u?``PY7b99(4@G}phc^j-5&@RuC!Rv~PVOK(!8 z)a?DDB2}v(%z_+dnf@}rf|nor&5WO@VC*{RgRPb0ku`zY8y9B)zpGHpopgTI_=zK25EcSUw8B4c~Ine_}M$ zSYYgo2e9(#Yq#7}w0D%i0*vQVk5iKCvSXJbQSBQ$--t~I)h(rv9zpKoxemrE6MXD| zQV2ZESzN^qauvJh*I32)zMj4hJ>3?&flk@X9~=ZV318hEz6nn;3R1Ng`W-8@v0t-T z;YF-a#0S`sT8rw;^3l3oyAHp6sP$L!X~4;z7^E9*`t$uJ?!T|?F}xd}KjzqP03t)77nI#T#8klo4jv_U%_iY+w2|p!&;)X+su~r6EK@C>uOqB?Kb)y8i&praXpGX z+ARC#TbPdrJBzk+LmFI(d9fY6#olv5_x29nkeeEexE_cmGmJ=Pko&+}?VF2aGo~GP z)GVF@{Rn--teFl5=U&mxbJ>O32eNCob@ptU{TAOB=G$AfKiX8gKU(`}r1o9=W`+vf z0TnOWKx{mGd(`ZPqAn>tUW8J1Zv>lg#Fx7IunT;-YinB_2Y=z682mXHQM@9Eb(l#( z+Qa2YGDams++H%|VeH#KYVUZ0?&+0iiFy_rU&wOC4b!}_cm71E3{T#E zq{VJ$9n_yOuYzZBYxY}=3H5rV5QksDAX@C3AJa$BR4DYSG@`fkJj2~$qCU>Sw{PaF zgF+bdvz9xk;{(+3Z(hfoqn5ZRoNP8JKg1_D#$yn?s#Xp^_)8e~b7)G7y==s_=Kk3` zUqR?4yTTv4OV|AM)qSCB9y?i)DFFdj$kAb z8nt&;p+s~x*f;^LVgereVLIDrvOymsX@GgT0+K@fQIx(p$(K>KHEI} zOp-s{aAXrSU|O=br7hXPcClPf5>6Fwa=s=m;AX!rFK(K_1$rx|aMC)`$RXnI^&QHC zVAOB_OJHKXY{ll=p}(QqhhOhbsxNdZ>s!wuf0Nly;<*+Us>N7+62Ze(d&iT)p>Eav zHf6h!Nv7c80h(*=)^r!Y6*cy{?xurX_Z6}3ndYE~9&ScA!C_&w;<@B>tOO6>eK5XD zB)sZY3=qASSf23^?rwU8+6Q*K7?Ggto7yJVucNN5k=iYY0qEax=q=&dyIZXT@RpJ=AK4f_pqbTh4PRz zzmdY8lgBZ56^HFf`kbWkwRaS-S>!!z_#?SqU{LEZEllf0HWk42Vyk!ol5vWC^JY;z zIpVW->_%mWyQF<_q|8gh1F_l2r5%}EbkIW?&Jpsty4hoNP45)H^DqdQ6f8QS6Y&Ui zBnq0^QHpZC8+(Tt+r|6JQr_ViA^Cx=A7xx!BFCPLV*xwgpuuB)xOXD-}F|7wgRx1l%yoPP&7|7u=^ z9OLLTWq(qXYV4Dk^g!EiJrG+W1+uKGN)z^5J+W!Yx>IF-D}_vHf^{-LZ-9CyIo#wuvnVz8K2a0o1KU zC+;4s!K?7}zb2Yne5QR%4>kuX@X9N^Rp&-%sxuzEUKbNt^UQl)dL`zYlbv$jp!k-a z!%wjvn%ZS)Lv8e*>V=<)`VNYjr#+cxK_;G+$vl-hPx>%G59<37+(1=X@lD++;S!rA zQfu2A!}yDEXt9!RL2|84daWj1@>1Uaj^Ot-rPkfz0Wx$qL!a!>&@|3*<9Xj^wyJG zjo|wgenV(zU_G`P$l92!foF6LaC~&0hGd=xGx01-=1JCMQh)x(^tbqD8TFU3JsA6c zeY_%zOv*eI+od;aSk1}%``_0-2kHxaiz~iw!`7o()?l)yBz1$;oa{s04E^i3zC&L% zsYU7|4;!GILb+Y8WAZ)tp?E1ZX_w>}g9SYVbX}m6#%H*q|Kj-=_BZe~ z!BErpp9t^61Nc^Tl3KTOQzD^R_zc&!V2kse=oMYlEs+7-CR!-IRUKNa+=4^)C}~Hv zt88FaWB|7V@E8D(wAi<<#Jga)WE)T`RKjr32;(nbv@NtHJiwCp5;dg1zbNlDgnrfc zbH@I76fgDdGK`^i7j8$h@NS|l@>=-A?C`&_L--)uNZ|=y7tXdM4IpZ2gNn8WWgPkT5^U4#!2wh za-_GX9!sU^2d~|uR5VL|5yDPOYsa@4{90@w%v1O zTePH5A5zAgd;o~gL0G%UtTJbc_Dz{Zrp%5GG!)Jr8he!iCEiCIu5<=H$2>ytRQn2@ znluQvqad_litcfD{-)kUI6DXLVImMr;sC=uox^f@Y%Z%X2{=X1qb9H$i#t+uxJl`Q z^Gm4E+zyl@n1UIZOv2$~%_?wGu98-EO)5EKrF^-T-&>rIJA#Cz?y+RY?nM}7mblU+ 
[base85 binary patch data not shown]
diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/linker.lock b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/linker.lock
deleted file mode 100644
index e69de29bb..000000000
diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson.node b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson.node
deleted file mode 100644
index ec10a654ccec8b093e2982c214a9ad4641f77238..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 57156
[base85 binary patch data not shown]

diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson/ext/bson.o b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/Release/obj.target/bson/ext/bson.o
deleted file mode 100644
index 90cbde352ae26674a565ced5f0e358fcb672d7b8..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 49084
[base85 binary patch data not shown]
z<=v#xG4zOZn21P!NTvTsrDF&a>6=vgR+Ww-#XREc%6qi*i^(7Lt0&GsdcCxOp6$Ad zhk5=>-5kX(dbkLtltTPNvpU*Jl67gt+GZ+R_wyU#2 zwcAzlJsYmWoyU00N2U;AOx48a)Oz60RQt9W#s!SufN>Sb@>>~x*)YCCd;)%k*oE~w z;&Qd_h{l+}@HVy1xEWEg%FSqxr-8 zyO6}Eu}(;Yp2RvM5XRiLXI#L=;-7aEZbPiFX^uPl>M^#x^4I^P8&9pke$; zg+C)MfS+~&!KZ)-eRAEA<8MHPgGA(SSK;pvq3?%@$j@(=GXHxD|E2I$H{!u>I1zf7 zp~BU~Hhk-p*o|?72>pMD2)g@;(ATp>$j|oy$v-a#2!8oQv`2{w&r#teDtxyJKdr)_ z5Rw1bT#5N?m#47aOGNp?K2SbfsVJBIlKj#1qWxbYLauj-DE}Z4?Q;&XEoDFG`oew$ zpO^7m!*nqDG4WT@4>@5sdzrl8bJ&ho~hUX5|ZdWS2N#S~h zuMkoHw}`NhzpF6cA0@vrM5OzPu#YQMc&Q5CN?ZUrh>+n4BJA{4Vw+m84x-X*FN9Af z{u*{jTx1yhE&_2aaWU3kh-lZB75+hCZ?-e!7(|3#Ca7?L2)^}1w0oF{cE67Zx*rp- z#5|gaa{ojGpU;TU_ldoLD6a@eKQW$R)T5RNx@(AN=bKdecU3yS^UCyYmHt;1?&pzw zd_eL!pNRa6iS_VPM96hB5pwMyqWnKA98UXy9Zw>nK68kuPl8yF`5X~;@CgxtJlYBD zk$=sJd`AOWdiEk^r|1JCl!yKfD=nN3caO_4o zL^#`i#7g;%DF>XZNJl3U={1?@$CG}tVGIURAB7A<_o6=`d@tksVBVtPc`Tf*j!O9E5oR@np=8iKz4%A_ki$i9Y2&Ps99{ zc8|EB3a2W(PT@L*+Z6I!&7|kIn2A#rh83<;*sW05Kj`?*DCt`iu2U$!s{y_6J!YoA z3#6Vrv{QtS12SBu!qZfExe7l=0.6.19" - }, - "scripts": { - "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", - "test": "nodeunit ./test/node && TEST_NATIVE=TRUE nodeunit ./test/node" - }, - "licenses": [ - { - "type": "Apache License, Version 2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0" - } - ], - "readme": "Javascript + C++ BSON parser\n============================\n\nThis BSON parser is primarily meant for usage with the `mongodb` node.js driver. However thanks to such wonderful tools at `onejs` we are able to package up a BSON parser that will work in the browser aswell. The current build is located in the `browser_build/bson.js` file.\n\nA simple example on how to use it\n\n \n \n \n \n \n \n\n It's got two simple methods to use in your application.\n\n * BSON.serialize(object, checkKeys, asBuffer, serializeFunctions)\n * @param {Object} object the Javascript object to serialize.\n * @param {Boolean} checkKeys the serializer will check if keys are valid.\n * @param {Boolean} asBuffer return the serialized object as a Buffer object **(ignore)**.\n * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**\n * @return {TypedArray/Array} returns a TypedArray or Array depending on what your browser supports\n \n * BSON.deserialize(buffer, options, isArray)\n * Options\n * **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.\n * **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.\n * **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.\n * @param {TypedArray/Array} a TypedArray/Array containing the BSON data\n * @param {Object} [options] additional options used for the deserialization.\n * @param {Boolean} [isArray] ignore used for recursive parsing.\n * @return {Object} returns the deserialized Javascript Object.\n", - "readmeFilename": "README.md", - "homepage": "https://github.com/mongodb/js-bson", - "_id": "bson@0.2.2", - "dist": { - "shasum": "f7b4239566d10de61b7debd2b53c7e3ad67036ef" - }, - "_from": "bson@0.2.2", - "_resolved": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz" -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/LICENSE b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- 
a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/README.md b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/README.md deleted file mode 100644 index 7428b0d0e..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/README.md +++ /dev/null @@ -1,4 +0,0 @@ -kerberos -======== - -Kerberos library for node.js \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/binding.gyp b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/binding.gyp deleted file mode 100644 index 027a70f20..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/binding.gyp +++ /dev/null @@ -1,41 +0,0 @@ -{ - 'targets': [ - { - 'target_name': 'kerberos', - 'cflags!': [ '-fno-exceptions' ], - 'cflags_cc!': [ '-fno-exceptions' ], - 'conditions': [ - ['OS=="mac"', { - 'sources': [ 'lib/kerberos.cc', 'lib/worker.cc', 'lib/kerberosgss.c', 'lib/base64.c', 'lib/kerberos_context.cc' ], - 'defines': [ - '__MACOSX_CORE__' - ], - 'xcode_settings': { - 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES' - }, - "link_settings": { - "libraries": [ - "-lkrb5" - ] - } - }], - ['OS=="win"', { - 'sources': [ - 'lib/win32/kerberos.cc', - 'lib/win32/base64.c', - 'lib/win32/worker.cc', - 'lib/win32/kerberos_sspi.c', - 'lib/win32/wrappers/security_buffer.cc', - 'lib/win32/wrappers/security_buffer_descriptor.cc', - 'lib/win32/wrappers/security_context.cc', - 'lib/win32/wrappers/security_credentials.cc' - ], - "link_settings": { - "libraries": [ - ] - } - }] - ] - } - ] -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d deleted file mode 100644 index 0bc320607..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/kerberos.node.d +++ /dev/null @@ -1 +0,0 @@ -cmd_Release/kerberos.node := rm -rf "Release/kerberos.node" && cp -af "Release/obj.target/kerberos.node" "Release/kerberos.node" diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d deleted file mode 100644 index ba6dec80c..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/.deps/Release/obj.target/kerberos.node.d +++ /dev/null @@ -1 +0,0 @@ -cmd_Release/obj.target/kerberos.node := flock ./Release/linker.lock g++ -shared -pthread -rdynamic -m32 -Wl,-soname=kerberos.node -o Release/obj.target/kerberos.node -Wl,--start-group -Wl,--end-group diff --git 
a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/kerberos.node b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/Release/kerberos.node
deleted file mode 100644
index 5d872403f814739cf4d6d7459d9c5872bbd73c14..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 6608
[base85 binary patch data not shown]
z0u-5Tec2Heue2}A{n=Yokvoc(zARV1f1m<-{~;641)rU~J|TPAs2TRm{twtYJ81Mu zxBSaldZ<1%!|Cro%5r!kd#Y^f_H9Fm-;%S?&@;41h7b20s)YakPyCsE*=y7W{i*%3 ztv6heKbt)j-nwn5ue@UDAW6;tNBH#&{V<0)ngtz+^6%~+pxXcbd^w-XMftORlVHe< zFm5on1L&9gmho$%biD{i1HMdiJV?%PQuvxj=;s6f#iAKp_K#3=}d@$Uq?jg$(@P8Nk2YJjc|M zI{(A22J`Y30rNJ%JJs!wyi5IhJ@2lB6Yqf6AbI~wK;8!VGnKk&P=BC@h5K?MeEPc1S<1WK3%**LSS0RK{x0 z#1qlQ@u-j}7V%=jitb6Fr;>hJS#~>VFYczs0UNfolc2v+b|!pb$@{70JKfObE!A>j zc4s=0bnI9ZJv0(lhwr96gi+$D4j3_1BpL5O7#H2~u;iHX=4#>MicW4a>7;y}ntzqJ z_Mj%B)-+hF&>csw+Je`!->_a-Ho?6@u7b`zL82ddC7An#e%wRkgV1S%Dkb+)Etq}j z$2~{pUeiQh(nDbG8~Sn2kh#BfT(*H>HijA)^QB4q zy@3Yp7sgo)|AXJV0l#wimBUYu!7;Ty0G&BzTv>C2-|2vzew@b%u#U?bOAdbgzSBfM z@=37v<9FH!{Pg{vzS{5OfS;`0U;${;r1i6CV0@Vu6EWd;tGt|p592NYljyflsq#r1 znct-xOH(!czSaPeJ|s%6t)`}#LYU~ty})rfKXjFQIXGSd1(>KF1onUmG@kgqKvGpz zb@mljk$J`yRuOoX6;|2dnN?U;SLNAMm@BHzpu+qnQfEzJl|fzq3(2=oXGht264e<| znBP3=EGVp2o;6m5`HQFaw!-{HRQp(AYyuGXD=8#1hdnulxhlx-80I$-_ShKaFP{31 zqzsMjDDE3lNMLb5C8iX>tY+Sz5PnI6>rjOHDBplJXp$kO$#vE9GcdnOwCnj9*flGF zYeyG)0=|hK24*~tYareekY&J332_W^FyMc4eEyP@1>*Mr9~sT3fqxV)0te&22CNSV z;~xY3Juv=h;8|nil?k2)ogV}F`4(8`*TDN<1m^i95wpy%^T+xzYx2Q*aB1KL=AqBR z05*YZfWuTk?EfHeaJ&sS`ELi-`;YZ!(UJB&KhxQ1b)Z&U*SfjcZd%{c%F467ZXDWB z7cfO#yDQxYkWp+qUcIu~>T-O0XNT>_GN~O_d$*qLSWnNjW06!efin5J z$2YCnxV}!$ diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/config.gypi b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/config.gypi deleted file mode 100644 index 91ce77280..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/config.gypi +++ /dev/null @@ -1,115 +0,0 @@ -# Do not edit. File was generated by node-gyp's "configure" step -{ - "target_defaults": { - "cflags": [], - "default_configuration": "Release", - "defines": [], - "include_dirs": [], - "libraries": [] - }, - "variables": { - "clang": 0, - "gcc_version": 46, - "host_arch": "ia32", - "node_install_npm": "true", - "node_prefix": "/usr/local", - "node_shared_cares": "false", - "node_shared_http_parser": "false", - "node_shared_libuv": "false", - "node_shared_openssl": "false", - "node_shared_v8": "false", - "node_shared_zlib": "false", - "node_tag": "", - "node_unsafe_optimizations": 0, - "node_use_dtrace": "false", - "node_use_etw": "false", - "node_use_openssl": "true", - "node_use_perfctr": "false", - "node_use_systemtap": "false", - "python": "/usr/bin/python", - "target_arch": "ia32", - "v8_enable_gdbjit": 0, - "v8_no_strict_aliasing": 1, - "v8_use_snapshot": "true", - "nodedir": "/home/vagrant/.node-gyp/0.10.24", - "copy_dev_lib": "true", - "standalone_static_library": 1, - "cache_lock_stale": "60000", - "sign_git_tag": "", - "always_auth": "", - "user_agent": "node/v0.10.24 linux ia32", - "bin_links": "true", - "key": "", - "description": "true", - "fetch_retries": "2", - "heading": "npm", - "user": "", - "force": "", - "cache_min": "10", - "init_license": "ISC", - "editor": "vi", - "rollback": "true", - "cache_max": "null", - "userconfig": "/home/vagrant/.npmrc", - "engine_strict": "", - "init_author_name": "", - "init_author_url": "", - "tmp": "/home/vagrant/tmp", - "depth": "null", - "save_dev": "", - "usage": "", - "https_proxy": "", - "onload_script": "", - "rebuild_bundle": "true", - "save_bundle": "", - "shell": "/bin/bash", - "prefix": "/usr/local", - "registry": "https://registry.npmjs.org/", - "browser": "", - "cache_lock_wait": "10000", - "save_optional": "", - "searchopts": "", - "versions": "", - "cache": 
"/home/vagrant/.npm", - "ignore_scripts": "", - "searchsort": "name", - "version": "", - "local_address": "", - "viewer": "man", - "color": "true", - "fetch_retry_mintimeout": "10000", - "umask": "18", - "fetch_retry_maxtimeout": "60000", - "message": "%s", - "cert": "", - "global": "", - "link": "", - "save": "", - "unicode": "true", - "long": "", - "production": "", - "unsafe_perm": "true", - "node_version": "v0.10.24", - "tag": "latest", - "git_tag_version": "true", - "shrinkwrap": "true", - "fetch_retry_factor": "10", - "npat": "", - "proprietary_attribs": "true", - "strict_ssl": "true", - "username": "", - "dev": "", - "globalconfig": "/usr/local/etc/npmrc", - "init_module": "/home/vagrant/.npm-init.js", - "parseable": "", - "globalignorefile": "/usr/local/etc/npmignore", - "cache_lock_retries": "10", - "group": "1000", - "init_author_email": "", - "searchexclude": "", - "git": "git", - "optional": "true", - "email": "", - "json": "" - } -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk deleted file mode 100644 index d2ce49b92..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/build/kerberos.target.mk +++ /dev/null @@ -1,42 +0,0 @@ -# This file is generated by gyp; do not edit. - -TOOLSET := target -TARGET := kerberos -### Rules for final target. -LDFLAGS_Debug := \ - -pthread \ - -rdynamic \ - -m32 - -LDFLAGS_Release := \ - -pthread \ - -rdynamic \ - -m32 - -LIBS := - -$(obj).target/kerberos.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) -$(obj).target/kerberos.node: LIBS := $(LIBS) -$(obj).target/kerberos.node: TOOLSET := $(TOOLSET) -$(obj).target/kerberos.node: FORCE_DO_CMD - $(call do_cmd,solink_module) - -all_deps += $(obj).target/kerberos.node -# Add target alias -.PHONY: kerberos -kerberos: $(builddir)/kerberos.node - -# Copy this to the executable output path. -$(builddir)/kerberos.node: TOOLSET := $(TOOLSET) -$(builddir)/kerberos.node: $(obj).target/kerberos.node FORCE_DO_CMD - $(call do_cmd,copy) - -all_deps += $(builddir)/kerberos.node -# Short alias for building this executable. -.PHONY: kerberos.node -kerberos.node: $(obj).target/kerberos.node $(builddir)/kerberos.node - -# Add executable to "all" target. 
-.PHONY: all -all: $(builddir)/kerberos.node - diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/index.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/index.js deleted file mode 100644 index b8c853276..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/index.js +++ /dev/null @@ -1,6 +0,0 @@ -// Get the Kerberos library -module.exports = require('./lib/kerberos'); -// Set up the auth processes -module.exports['processes'] = { - MongoAuthProcess: require('./lib/auth_processes/mongodb').MongoAuthProcess -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/auth_processes/mongodb.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/auth_processes/mongodb.js deleted file mode 100644 index f1e9231a7..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/auth_processes/mongodb.js +++ /dev/null @@ -1,281 +0,0 @@ -var format = require('util').format; - -var MongoAuthProcess = function(host, port, service_name) { - // Check what system we are on - if(process.platform == 'win32') { - this._processor = new Win32MongoProcessor(host, port, service_name); - } else { - this._processor = new UnixMongoProcessor(host, port, service_name); - } -} - -MongoAuthProcess.prototype.init = function(username, password, callback) { - this._processor.init(username, password, callback); -} - -MongoAuthProcess.prototype.transition = function(payload, callback) { - this._processor.transition(payload, callback); -} - -/******************************************************************* - * - * Win32 SSIP Processor for MongoDB - * - *******************************************************************/ -var Win32MongoProcessor = function(host, port, service_name) { - this.host = host; - this.port = port - // SSIP classes - this.ssip = require("../kerberos").SSIP; - // Set up first transition - this._transition = Win32MongoProcessor.first_transition(this); - // Set up service name - service_name = service_name || "mongodb"; - // Set up target - this.target = format("%s/%s", service_name, host); - // Number of retries - this.retries = 10; -} - -Win32MongoProcessor.prototype.init = function(username, password, callback) { - var self = this; - // Save the values used later - this.username = username; - this.password = password; - // Aquire credentials - this.ssip.SecurityCredentials.aquire_kerberos(username, password, function(err, security_credentials) { - if(err) return callback(err); - // Save credentials - self.security_credentials = security_credentials; - // Callback with success - callback(null); - }); -} - -Win32MongoProcessor.prototype.transition = function(payload, callback) { - if(this._transition == null) return callback(new Error("Transition finished")); - this._transition(payload, callback); -} - -Win32MongoProcessor.first_transition = function(self) { - return function(payload, callback) { - self.ssip.SecurityContext.initialize( - self.security_credentials, - self.target, - payload, function(err, security_context) { - if(err) return callback(err); - - // If no context try again until we have no more retries - if(!security_context.hasContext) { - if(self.retries == 0) return callback(new Error("Failed to initialize security context")); - // Update the number of retries - self.retries = self.retries - 1; - // 
Set next transition - return self.transition(payload, callback); - } - - // Set next transition - self._transition = Win32MongoProcessor.second_transition(self); - self.security_context = security_context; - // Return the payload - callback(null, security_context.payload); - }); - } -} - -Win32MongoProcessor.second_transition = function(self) { - return function(payload, callback) { - // Perform a step - self.security_context.initialize(self.target, payload, function(err, security_context) { - if(err) return callback(err); - - // If no context try again until we have no more retries - if(!security_context.hasContext) { - if(self.retries == 0) return callback(new Error("Failed to initialize security context")); - // Update the number of retries - self.retries = self.retries - 1; - // Set next transition - self._transition = Win32MongoProcessor.first_transition(self); - // Retry - return self.transition(payload, callback); - } - - // Set next transition - self._transition = Win32MongoProcessor.third_transition(self); - // Return the payload - callback(null, security_context.payload); - }); - } -} - -Win32MongoProcessor.third_transition = function(self) { - return function(payload, callback) { - var messageLength = 0; - // Get the raw bytes - var encryptedBytes = new Buffer(payload, 'base64'); - var encryptedMessage = new Buffer(messageLength); - // Copy first byte - encryptedBytes.copy(encryptedMessage, 0, 0, messageLength); - // Set up trailer - var securityTrailerLength = encryptedBytes.length - messageLength; - var securityTrailer = new Buffer(securityTrailerLength); - // Copy the bytes - encryptedBytes.copy(securityTrailer, 0, messageLength, securityTrailerLength); - - // Types used - var SecurityBuffer = self.ssip.SecurityBuffer; - var SecurityBufferDescriptor = self.ssip.SecurityBufferDescriptor; - - // Set up security buffers - var buffers = [ - new SecurityBuffer(SecurityBuffer.DATA, encryptedBytes) - , new SecurityBuffer(SecurityBuffer.STREAM, securityTrailer) - ]; - - // Set up the descriptor - var descriptor = new SecurityBufferDescriptor(buffers); - - // Decrypt the data - self.security_context.decryptMessage(descriptor, function(err, security_context) { - if(err) return callback(err); - - var length = 4; - if(self.username != null) { - length += self.username.length; - } - - var bytesReceivedFromServer = new Buffer(length); - bytesReceivedFromServer[0] = 0x01; // NO_PROTECTION - bytesReceivedFromServer[1] = 0x00; // NO_PROTECTION - bytesReceivedFromServer[2] = 0x00; // NO_PROTECTION - bytesReceivedFromServer[3] = 0x00; // NO_PROTECTION - - if(self.username != null) { - var authorization_id_bytes = new Buffer(self.username, 'utf8'); - authorization_id_bytes.copy(bytesReceivedFromServer, 4, 0); - } - - self.security_context.queryContextAttributes(0x00, function(err, sizes) { - if(err) return callback(err); - - var buffers = [ - new SecurityBuffer(SecurityBuffer.TOKEN, new Buffer(sizes.securityTrailer)) - , new SecurityBuffer(SecurityBuffer.DATA, bytesReceivedFromServer) - , new SecurityBuffer(SecurityBuffer.PADDING, new Buffer(sizes.blockSize)) - ] - - var descriptor = new SecurityBufferDescriptor(buffers); - - self.security_context.encryptMessage(descriptor, 0x80000001, function(err, security_context) { - if(err) return callback(err); - callback(null, security_context.payload); - }); - }); - }); - } -} - -/******************************************************************* - * - * UNIX MIT Kerberos processor - * - 
*******************************************************************/ -var UnixMongoProcessor = function(host, port, service_name) { - this.host = host; - this.port = port - // SSIP classes - this.Kerberos = require("../kerberos").Kerberos; - this.kerberos = new this.Kerberos(); - service_name = service_name || "mongodb"; - // Set up first transition - this._transition = UnixMongoProcessor.first_transition(this); - // Set up target - this.target = format("%s@%s", service_name, host); - // Number of retries - this.retries = 10; -} - -UnixMongoProcessor.prototype.init = function(username, password, callback) { - var self = this; - this.username = username; - this.password = password; - // Call client initiate - this.kerberos.authGSSClientInit( - self.target - , this.Kerberos.GSS_C_MUTUAL_FLAG, function(err, context) { - self.context = context; - // Return the context - callback(null, context); - }); -} - -UnixMongoProcessor.prototype.transition = function(payload, callback) { - if(this._transition == null) return callback(new Error("Transition finished")); - this._transition(payload, callback); -} - -UnixMongoProcessor.first_transition = function(self) { - return function(payload, callback) { - self.kerberos.authGSSClientStep(self.context, '', function(err, result) { - if(err) return callback(err); - // Set up the next step - self._transition = UnixMongoProcessor.second_transition(self); - // Return the payload - callback(null, self.context.response); - }) - } -} - -UnixMongoProcessor.second_transition = function(self) { - return function(payload, callback) { - self.kerberos.authGSSClientStep(self.context, payload, function(err, result) { - if(err && self.retries == 0) return callback(err); - // Attempt to re-establish a context - if(err) { - // Adjust the number of retries - self.retries = self.retries - 1; - // Call same step again - return self.transition(payload, callback); - } - - // Set up the next step - self._transition = UnixMongoProcessor.third_transition(self); - // Return the payload - callback(null, self.context.response || ''); - }); - } -} - -UnixMongoProcessor.third_transition = function(self) { - return function(payload, callback) { - // GSS Client Unwrap - self.kerberos.authGSSClientUnwrap(self.context, payload, function(err, result) { - if(err) return callback(err, false); - - // Wrap the response - self.kerberos.authGSSClientWrap(self.context, self.context.response, self.username, function(err, result) { - if(err) return callback(err, false); - // Set up the next step - self._transition = UnixMongoProcessor.fourth_transition(self); - // Return the payload - callback(null, self.context.response); - }); - }); - } -} - -UnixMongoProcessor.fourth_transition = function(self) { - return function(payload, callback) { - // Clean up context - self.kerberos.authGSSClientClean(self.context, function(err, result) { - if(err) return callback(err, false); - // Set the transition to null - self._transition = null; - // Callback with valid authentication - callback(null, true); - }); - } -} - -// Set the process -exports.MongoAuthProcess = MongoAuthProcess; \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.c b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.c deleted file mode 100644 index 4232106b9..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.c +++ /dev/null @@ -1,120 +0,0 @@ -/** - * 
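The lib/auth_processes/mongodb.js removed above wires the Win32 SSPI and Unix MIT Kerberos processors behind one MongoAuthProcess facade: the platform is picked from process.platform, and callers only ever see init() followed by repeated transition() calls until the processor reports completion. A rough usage sketch of that driver loop, with placeholder host, port and principal, and a hypothetical sendToServer(payload, cb) helper standing in for the SASL round trip with the server:

    // Rough sketch of driving the deleted MongoAuthProcess. Host, port,
    // principal and sendToServer() are placeholders, not part of the module.
    var MongoAuthProcess = require('kerberos').processes.MongoAuthProcess;

    var auth = new MongoAuthProcess('db.example.com', 27017, 'mongodb');
    auth.init('user@EXAMPLE.COM', 'password', function(err) {
      if (err) return console.error(err);
      // Each transition consumes the previous server payload and produces the
      // next client payload; the Unix path finishes on its fourth transition,
      // which calls back with `true` instead of a payload.
      (function step(serverPayload) {
        auth.transition(serverPayload, function(err, clientPayload) {
          if (err) return console.error(err);
          if (clientPayload === true) return console.log('authenticated');
          sendToServer(clientPayload, step); // hypothetical transport helper
        });
      })('');
    });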
Copyright (c) 2006-2008 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -#include "base64.h" - -#include -#include - -// base64 tables -static char basis_64[] = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; -static signed char index_64[128] = -{ - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63, - 52,53,54,55, 56,57,58,59, 60,61,-1,-1, -1,-1,-1,-1, - -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14, - 15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1, - -1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40, - 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1 -}; -#define CHAR64(c) (((c) < 0 || (c) > 127) ? -1 : index_64[(c)]) - -// base64_encode : base64 encode -// -// value : data to encode -// vlen : length of data -// (result) : new char[] - c-str of result -char *base64_encode(const unsigned char *value, int vlen) -{ - char *result = (char *)malloc((vlen * 4) / 3 + 5); - char *out = result; - while (vlen >= 3) - { - *out++ = basis_64[value[0] >> 2]; - *out++ = basis_64[((value[0] << 4) & 0x30) | (value[1] >> 4)]; - *out++ = basis_64[((value[1] << 2) & 0x3C) | (value[2] >> 6)]; - *out++ = basis_64[value[2] & 0x3F]; - value += 3; - vlen -= 3; - } - if (vlen > 0) - { - *out++ = basis_64[value[0] >> 2]; - unsigned char oval = (value[0] << 4) & 0x30; - if (vlen > 1) oval |= value[1] >> 4; - *out++ = basis_64[oval]; - *out++ = (vlen < 2) ? 
'=' : basis_64[(value[1] << 2) & 0x3C]; - *out++ = '='; - } - *out = '\0'; - - return result; -} - -// base64_decode : base64 decode -// -// value : c-str to decode -// rlen : length of decoded result -// (result) : new unsigned char[] - decoded result -unsigned char *base64_decode(const char *value, int *rlen) -{ - *rlen = 0; - int c1, c2, c3, c4; - - int vlen = strlen(value); - unsigned char *result =(unsigned char *)malloc((vlen * 3) / 4 + 1); - unsigned char *out = result; - - while (1) - { - if (value[0]==0) - return result; - c1 = value[0]; - if (CHAR64(c1) == -1) - goto base64_decode_error;; - c2 = value[1]; - if (CHAR64(c2) == -1) - goto base64_decode_error;; - c3 = value[2]; - if ((c3 != '=') && (CHAR64(c3) == -1)) - goto base64_decode_error;; - c4 = value[3]; - if ((c4 != '=') && (CHAR64(c4) == -1)) - goto base64_decode_error;; - - value += 4; - *out++ = (CHAR64(c1) << 2) | (CHAR64(c2) >> 4); - *rlen += 1; - if (c3 != '=') - { - *out++ = ((CHAR64(c2) << 4) & 0xf0) | (CHAR64(c3) >> 2); - *rlen += 1; - if (c4 != '=') - { - *out++ = ((CHAR64(c3) << 6) & 0xc0) | CHAR64(c4); - *rlen += 1; - } - } - } - -base64_decode_error: - *result = 0; - *rlen = 0; - return result; -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.h deleted file mode 100644 index f0e1f0616..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/base64.h +++ /dev/null @@ -1,18 +0,0 @@ -/** - * Copyright (c) 2006-2008 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
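The base64.c/base64.h pair above exists only so the native code can shuttle GSSAPI tokens around as text; on the JavaScript side the same conversion is done with Buffers (the Win32 processor earlier decodes with new Buffer(payload, 'base64')). A minimal sketch of the equivalent round trip using the current Buffer API:

    // Minimal sketch of the base64 round trip the C helpers above implement,
    // expressed with Node's Buffer.
    var raw = Buffer.from('example token bytes');   // stand-in for a GSS token
    var b64 = raw.toString('base64');               // what base64_encode() emits
    var back = Buffer.from(b64, 'base64');          // what base64_decode() recovers
    console.log(b64, back.equals(raw));             // prints the text and `true`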
- **/ - -char *base64_encode(const unsigned char *value, int vlen); -unsigned char *base64_decode(const char *value, int *rlen); diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.cc deleted file mode 100644 index 08eda82b2..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.cc +++ /dev/null @@ -1,563 +0,0 @@ -#include "kerberos.h" -#include -#include "worker.h" -#include "kerberos_context.h" - -#ifndef ARRAY_SIZE -# define ARRAY_SIZE(a) (sizeof((a)) / sizeof((a)[0])) -#endif - -Persistent Kerberos::constructor_template; - -// Call structs -typedef struct AuthGSSClientCall { - uint32_t flags; - char *uri; -} AuthGSSClientCall; - -typedef struct AuthGSSClientStepCall { - KerberosContext *context; - char *challenge; -} AuthGSSClientStepCall; - -typedef struct AuthGSSClientUnwrapCall { - KerberosContext *context; - char *challenge; -} AuthGSSClientUnwrapCall; - -typedef struct AuthGSSClientWrapCall { - KerberosContext *context; - char *challenge; - char *user_name; -} AuthGSSClientWrapCall; - -typedef struct AuthGSSClientCleanCall { - KerberosContext *context; -} AuthGSSClientCleanCall; - -// VException object (causes throw in calling code) -static Handle VException(const char *msg) { - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -} - -Kerberos::Kerberos() : ObjectWrap() { -} - -void Kerberos::Initialize(v8::Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(Kerberos::New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("Kerberos")); - - // Set up method for the Kerberos instance - NODE_SET_PROTOTYPE_METHOD(constructor_template, "authGSSClientInit", AuthGSSClientInit); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "authGSSClientStep", AuthGSSClientStep); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "authGSSClientUnwrap", AuthGSSClientUnwrap); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "authGSSClientWrap", AuthGSSClientWrap); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "authGSSClientClean", AuthGSSClientClean); - - // Set the symbol - target->ForceSet(String::NewSymbol("Kerberos"), constructor_template->GetFunction()); -} - -Handle Kerberos::New(const Arguments &args) { - // Create a Kerberos instance - Kerberos *kerberos = new Kerberos(); - // Return the kerberos object - kerberos->Wrap(args.This()); - return args.This(); -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// authGSSClientInit -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void _authGSSClientInit(Worker *worker) { - gss_client_state *state; - gss_client_response *response; - - // Allocate state - state = (gss_client_state *)malloc(sizeof(gss_client_state)); - - // Unpack the parameter data struct - AuthGSSClientCall *call = (AuthGSSClientCall *)worker->parameters; - // Start the kerberos client - response = authenticate_gss_client_init(call->uri, call->flags, state); - - // Release the parameter struct memory - free(call->uri); - free(call); - - // If we have an error mark worker as having had an error - if(response->return_code == AUTH_GSS_ERROR) { - worker->error = 
TRUE; - worker->error_code = response->return_code; - worker->error_message = response->message; - } else { - worker->return_value = state; - } - - // Free structure - free(response); -} - -static Handle _map_authGSSClientInit(Worker *worker) { - HandleScope scope; - - KerberosContext *context = KerberosContext::New(); - context->state = (gss_client_state *)worker->return_value; - // Persistent _context = Persistent::New(context->handle_); - return scope.Close(context->handle_); -} - -// Initialize method -Handle Kerberos::AuthGSSClientInit(const Arguments &args) { - HandleScope scope; - - // Ensure valid call - if(args.Length() != 3) return VException("Requires a service string uri, integer flags and a callback function"); - if(args.Length() == 3 && !args[0]->IsString() && !args[1]->IsInt32() && !args[2]->IsFunction()) - return VException("Requires a service string uri, integer flags and a callback function"); - - Local service = args[0]->ToString(); - // Convert uri string to c-string - char *service_str = (char *)calloc(service->Utf8Length() + 1, sizeof(char)); - // Write v8 string to c-string - service->WriteUtf8(service_str); - - // Allocate a structure - AuthGSSClientCall *call = (AuthGSSClientCall *)calloc(1, sizeof(AuthGSSClientCall)); - call->flags =args[1]->ToInt32()->Uint32Value(); - call->uri = service_str; - - // Unpack the callback - Local callback = Local::Cast(args[2]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _authGSSClientInit; - worker->mapper = _map_authGSSClientInit; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Kerberos::Process, (uv_after_work_cb)Kerberos::After); - // Return no value as it's callback based - return scope.Close(Undefined()); -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// authGSSClientStep -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void _authGSSClientStep(Worker *worker) { - gss_client_state *state; - gss_client_response *response; - char *challenge; - - // Unpack the parameter data struct - AuthGSSClientStepCall *call = (AuthGSSClientStepCall *)worker->parameters; - // Get the state - state = call->context->state; - challenge = call->challenge; - - // Check what kind of challenge we have - if(call->challenge == NULL) { - challenge = (char *)""; - } - - // Perform authentication step - response = authenticate_gss_client_step(state, challenge); - - // If we have an error mark worker as having had an error - if(response->return_code == AUTH_GSS_ERROR) { - worker->error = TRUE; - worker->error_code = response->return_code; - worker->error_message = response->message; - } else { - worker->return_code = response->return_code; - } - - // Free up structure - if(call->challenge != NULL) free(call->challenge); - free(call); - free(response); -} - -static Handle _map_authGSSClientStep(Worker *worker) { - HandleScope scope; - // Return the return code - return scope.Close(Int32::New(worker->return_code)); -} - -// Initialize method -Handle Kerberos::AuthGSSClientStep(const Arguments &args) { - HandleScope scope; - - // Ensure valid call - if(args.Length() != 2 && args.Length() != 3) return VException("Requires a GSS context, optional challenge string and callback function"); - if(args.Length() == 2 && !KerberosContext::HasInstance(args[0])) return VException("Requires a 
GSS context, optional challenge string and callback function"); - if(args.Length() == 3 && !KerberosContext::HasInstance(args[0]) && !args[1]->IsString()) return VException("Requires a GSS context, optional challenge string and callback function"); - - // Challenge string - char *challenge_str = NULL; - // Let's unpack the parameters - Local object = args[0]->ToObject(); - KerberosContext *kerberos_context = KerberosContext::Unwrap(object); - - // If we have a challenge string - if(args.Length() == 3) { - // Unpack the challenge string - Local challenge = args[1]->ToString(); - // Convert uri string to c-string - challenge_str = (char *)calloc(challenge->Utf8Length() + 1, sizeof(char)); - // Write v8 string to c-string - challenge->WriteUtf8(challenge_str); - } - - // Allocate a structure - AuthGSSClientStepCall *call = (AuthGSSClientStepCall *)calloc(1, sizeof(AuthGSSClientCall)); - call->context = kerberos_context; - call->challenge = challenge_str; - - // Unpack the callback - Local callback = Local::Cast(args[2]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _authGSSClientStep; - worker->mapper = _map_authGSSClientStep; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Kerberos::Process, (uv_after_work_cb)Kerberos::After); - - // Return no value as it's callback based - return scope.Close(Undefined()); -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// authGSSClientUnwrap -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void _authGSSClientUnwrap(Worker *worker) { - gss_client_response *response; - char *challenge; - - // Unpack the parameter data struct - AuthGSSClientUnwrapCall *call = (AuthGSSClientUnwrapCall *)worker->parameters; - challenge = call->challenge; - - // Check what kind of challenge we have - if(call->challenge == NULL) { - challenge = (char *)""; - } - - // Perform authentication step - response = authenticate_gss_client_unwrap(call->context->state, challenge); - - // If we have an error mark worker as having had an error - if(response->return_code == AUTH_GSS_ERROR) { - worker->error = TRUE; - worker->error_code = response->return_code; - worker->error_message = response->message; - } else { - worker->return_code = response->return_code; - } - - // Free up structure - if(call->challenge != NULL) free(call->challenge); - free(call); - free(response); -} - -static Handle _map_authGSSClientUnwrap(Worker *worker) { - HandleScope scope; - // Return the return code - return scope.Close(Int32::New(worker->return_code)); -} - -// Initialize method -Handle Kerberos::AuthGSSClientUnwrap(const Arguments &args) { - HandleScope scope; - - // Ensure valid call - if(args.Length() != 2 && args.Length() != 3) return VException("Requires a GSS context, optional challenge string and callback function"); - if(args.Length() == 2 && !KerberosContext::HasInstance(args[0]) && !args[1]->IsFunction()) return VException("Requires a GSS context, optional challenge string and callback function"); - if(args.Length() == 3 && !KerberosContext::HasInstance(args[0]) && !args[1]->IsString() && !args[2]->IsFunction()) return VException("Requires a GSS context, optional challenge string and callback function"); - - // Challenge string - char *challenge_str = NULL; - // Let's unpack the parameters - Local object = 
args[0]->ToObject(); - KerberosContext *kerberos_context = KerberosContext::Unwrap(object); - - // If we have a challenge string - if(args.Length() == 3) { - // Unpack the challenge string - Local challenge = args[1]->ToString(); - // Convert uri string to c-string - challenge_str = (char *)calloc(challenge->Utf8Length() + 1, sizeof(char)); - // Write v8 string to c-string - challenge->WriteUtf8(challenge_str); - } - - // Allocate a structure - AuthGSSClientUnwrapCall *call = (AuthGSSClientUnwrapCall *)calloc(1, sizeof(AuthGSSClientUnwrapCall)); - call->context = kerberos_context; - call->challenge = challenge_str; - - // Unpack the callback - Local callback = args.Length() == 3 ? Local::Cast(args[2]) : Local::Cast(args[1]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _authGSSClientUnwrap; - worker->mapper = _map_authGSSClientUnwrap; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Kerberos::Process, (uv_after_work_cb)Kerberos::After); - - // Return no value as it's callback based - return scope.Close(Undefined()); -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// authGSSClientWrap -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void _authGSSClientWrap(Worker *worker) { - gss_client_response *response; - char *user_name = NULL; - - // Unpack the parameter data struct - AuthGSSClientWrapCall *call = (AuthGSSClientWrapCall *)worker->parameters; - user_name = call->user_name; - - // Check what kind of challenge we have - if(call->user_name == NULL) { - user_name = (char *)""; - } - - // Perform authentication step - response = authenticate_gss_client_wrap(call->context->state, call->challenge, user_name); - - // If we have an error mark worker as having had an error - if(response->return_code == AUTH_GSS_ERROR) { - worker->error = TRUE; - worker->error_code = response->return_code; - worker->error_message = response->message; - } else { - worker->return_code = response->return_code; - } - - // Free up structure - if(call->challenge != NULL) free(call->challenge); - if(call->user_name != NULL) free(call->user_name); - free(call); - free(response); -} - -static Handle _map_authGSSClientWrap(Worker *worker) { - HandleScope scope; - // Return the return code - return scope.Close(Int32::New(worker->return_code)); -} - -// Initialize method -Handle Kerberos::AuthGSSClientWrap(const Arguments &args) { - HandleScope scope; - - // Ensure valid call - if(args.Length() != 3 && args.Length() != 4) return VException("Requires a GSS context, the result from the authGSSClientResponse after authGSSClientUnwrap, optional user name and callback function"); - if(args.Length() == 3 && !KerberosContext::HasInstance(args[0]) && !args[1]->IsString() && !args[2]->IsFunction()) return VException("Requires a GSS context, the result from the authGSSClientResponse after authGSSClientUnwrap, optional user name and callback function"); - if(args.Length() == 4 && !KerberosContext::HasInstance(args[0]) && !args[1]->IsString() && !args[2]->IsString() && !args[2]->IsFunction()) return VException("Requires a GSS context, the result from the authGSSClientResponse after authGSSClientUnwrap, optional user name and callback function"); - - // Challenge string - char *challenge_str = NULL; - char *user_name_str = NULL; - - // Let's unpack the kerberos 
context - Local object = args[0]->ToObject(); - KerberosContext *kerberos_context = KerberosContext::Unwrap(object); - - // Unpack the challenge string - Local challenge = args[1]->ToString(); - // Convert uri string to c-string - challenge_str = (char *)calloc(challenge->Utf8Length() + 1, sizeof(char)); - // Write v8 string to c-string - challenge->WriteUtf8(challenge_str); - - // If we have a user string - if(args.Length() == 4) { - // Unpack user name - Local user_name = args[2]->ToString(); - // Convert uri string to c-string - user_name_str = (char *)calloc(user_name->Utf8Length() + 1, sizeof(char)); - // Write v8 string to c-string - user_name->WriteUtf8(user_name_str); - } - - // Allocate a structure - AuthGSSClientWrapCall *call = (AuthGSSClientWrapCall *)calloc(1, sizeof(AuthGSSClientWrapCall)); - call->context = kerberos_context; - call->challenge = challenge_str; - call->user_name = user_name_str; - - // Unpack the callback - Local callback = args.Length() == 4 ? Local::Cast(args[3]) : Local::Cast(args[2]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _authGSSClientWrap; - worker->mapper = _map_authGSSClientWrap; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Kerberos::Process, (uv_after_work_cb)Kerberos::After); - - // Return no value as it's callback based - return scope.Close(Undefined()); -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// authGSSClientWrap -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void _authGSSClientClean(Worker *worker) { - gss_client_response *response; - - // Unpack the parameter data struct - AuthGSSClientCleanCall *call = (AuthGSSClientCleanCall *)worker->parameters; - - // Perform authentication step - response = authenticate_gss_client_clean(call->context->state); - - // If we have an error mark worker as having had an error - if(response->return_code == AUTH_GSS_ERROR) { - worker->error = TRUE; - worker->error_code = response->return_code; - worker->error_message = response->message; - } else { - worker->return_code = response->return_code; - } - - // Free up structure - free(call); - free(response); -} - -static Handle _map_authGSSClientClean(Worker *worker) { - HandleScope scope; - // Return the return code - return scope.Close(Int32::New(worker->return_code)); -} - -// Initialize method -Handle Kerberos::AuthGSSClientClean(const Arguments &args) { - HandleScope scope; - - // // Ensure valid call - if(args.Length() != 2) return VException("Requires a GSS context and callback function"); - if(!KerberosContext::HasInstance(args[0]) && !args[1]->IsFunction()) return VException("Requires a GSS context and callback function"); - - // Let's unpack the kerberos context - Local object = args[0]->ToObject(); - KerberosContext *kerberos_context = KerberosContext::Unwrap(object); - - // Allocate a structure - AuthGSSClientCleanCall *call = (AuthGSSClientCleanCall *)calloc(1, sizeof(AuthGSSClientCleanCall)); - call->context = kerberos_context; - - // Unpack the callback - Local callback = Local::Cast(args[1]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _authGSSClientClean; - worker->mapper = 
_map_authGSSClientClean; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Kerberos::Process, (uv_after_work_cb)Kerberos::After); - - // Return no value as it's callback based - return scope.Close(Undefined()); -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// UV Lib callbacks -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -void Kerberos::Process(uv_work_t* work_req) { - // Grab the worker - Worker *worker = static_cast(work_req->data); - // Execute the worker code - worker->execute(worker); -} - -void Kerberos::After(uv_work_t* work_req) { - // Grab the scope of the call from Node - v8::HandleScope scope; - - // Get the worker reference - Worker *worker = static_cast(work_req->data); - - // If we have an error - if(worker->error) { - v8::Local err = v8::Exception::Error(v8::String::New(worker->error_message)); - Local obj = err->ToObject(); - obj->Set(NODE_PSYMBOL("code"), Int32::New(worker->error_code)); - v8::Local args[2] = { err, v8::Local::New(v8::Null()) }; - // Execute the error - v8::TryCatch try_catch; - // Call the callback - worker->callback->Call(v8::Context::GetCurrent()->Global(), ARRAY_SIZE(args), args); - // If we have an exception handle it as a fatalexception - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - } else { - // // Map the data - v8::Handle result = worker->mapper(worker); - // Set up the callback with a null first - v8::Handle args[2] = { v8::Local::New(v8::Null()), result}; - // Wrap the callback function call in a TryCatch so that we can call - // node's FatalException afterwards. This makes it possible to catch - // the exception from JavaScript land using the - // process.on('uncaughtException') event. 
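Every binding in kerberos.cc funnels its result through the Process/After pair above: the blocking GSSAPI call runs on the libuv thread pool, and the JavaScript callback is invoked Node-style with (err, result), where a failed call carries the GSSAPI return code as err.code. A small sketch of what that contract looks like from the JavaScript wrapper that appears later in this patch (the service principal is a placeholder):

    // Sketch of the (err, result) contract implemented by Kerberos::After above.
    // 'mongodb@db.example.com' is a placeholder service principal.
    var Kerberos = require('kerberos').Kerberos;
    var kerberos = new Kerberos();

    kerberos.authGSSClientInit('mongodb@db.example.com', Kerberos.GSS_C_MUTUAL_FLAG,
      function(err, context) {
        if (err) {
          // err.code is worker->error_code from the AUTH_GSS_ERROR branch above
          return console.error('GSS init failed:', err.code, err.message);
        }
        // context is a KerberosContext; context.response exposes the latest
        // base64 token once authGSSClientStep() has produced one.
        console.log('context acquired', context);
      });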
- v8::TryCatch try_catch; - // Call the callback - worker->callback->Call(v8::Context::GetCurrent()->Global(), ARRAY_SIZE(args), args); - // If we have an exception handle it as a fatalexception - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - } - - // Clean up the memory - worker->callback.Dispose(); - delete worker; -} - -// Exporting function -extern "C" void init(Handle target) { - HandleScope scope; - Kerberos::Initialize(target); - KerberosContext::Initialize(target); -} - -NODE_MODULE(kerberos, init); diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.h deleted file mode 100644 index 061995700..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.h +++ /dev/null @@ -1,47 +0,0 @@ -#ifndef KERBEROS_H -#define KERBEROS_H - -#include -#include -#include -#include - -#include -#include - -extern "C" { - #include "kerberosgss.h" -} - -using namespace v8; -using namespace node; - -class Kerberos : public ObjectWrap { - -public: - Kerberos(); - ~Kerberos() {}; - - // Constructor used for creating new Kerberos objects from C++ - static Persistent constructor_template; - - // Initialize function for the object - static void Initialize(Handle target); - - // Method available - static Handle AuthGSSClientInit(const Arguments &args); - static Handle AuthGSSClientStep(const Arguments &args); - static Handle AuthGSSClientUnwrap(const Arguments &args); - static Handle AuthGSSClientWrap(const Arguments &args); - static Handle AuthGSSClientClean(const Arguments &args); - -private: - static Handle New(const Arguments &args); - - // Handles the uv calls - static void Process(uv_work_t* work_req); - // Called after work is done - static void After(uv_work_t* work_req); -}; - -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.js deleted file mode 100644 index b1a701ba0..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos.js +++ /dev/null @@ -1,91 +0,0 @@ -var kerberos = require('../build/Release/kerberos') - , KerberosNative = kerberos.Kerberos; - -var Kerberos = function() { - this._native_kerberos = new KerberosNative(); -} - -Kerberos.prototype.authGSSClientInit = function(uri, flags, callback) { - return this._native_kerberos.authGSSClientInit(uri, flags, callback); -} - -Kerberos.prototype.authGSSClientStep = function(context, challenge, callback) { - if(typeof challenge == 'function') { - callback = challenge; - challenge = ''; - } - - return this._native_kerberos.authGSSClientStep(context, challenge, callback); -} - -Kerberos.prototype.authGSSClientUnwrap = function(context, challenge, callback) { - if(typeof challenge == 'function') { - callback = challenge; - challenge = ''; - } - - return this._native_kerberos.authGSSClientUnwrap(context, challenge, callback); -} - -Kerberos.prototype.authGSSClientWrap = function(context, challenge, user_name, callback) { - if(typeof user_name == 'function') { - callback = user_name; - user_name = ''; - } - - return this._native_kerberos.authGSSClientWrap(context, challenge, user_name, callback); -} - -Kerberos.prototype.authGSSClientClean = function(context, 
callback) { - return this._native_kerberos.authGSSClientClean(context, callback); -} - -Kerberos.prototype.acquireAlternateCredentials = function(user_name, password, domain) { - return this._native_kerberos.acquireAlternateCredentials(user_name, password, domain); -} - -Kerberos.prototype.prepareOutboundPackage = function(principal, inputdata) { - return this._native_kerberos.prepareOutboundPackage(principal, inputdata); -} - -Kerberos.prototype.decryptMessage = function(challenge) { - return this._native_kerberos.decryptMessage(challenge); -} - -Kerberos.prototype.encryptMessage = function(challenge) { - return this._native_kerberos.encryptMessage(challenge); -} - -Kerberos.prototype.queryContextAttribute = function(attribute) { - if(typeof attribute != 'number' && attribute != 0x00) throw new Error("Attribute not supported"); - return this._native_kerberos.queryContextAttribute(attribute); -} - -// Some useful result codes -Kerberos.AUTH_GSS_CONTINUE = 0; -Kerberos.AUTH_GSS_COMPLETE = 1; - -// Some useful gss flags -Kerberos.GSS_C_DELEG_FLAG = 1; -Kerberos.GSS_C_MUTUAL_FLAG = 2; -Kerberos.GSS_C_REPLAY_FLAG = 4; -Kerberos.GSS_C_SEQUENCE_FLAG = 8; -Kerberos.GSS_C_CONF_FLAG = 16; -Kerberos.GSS_C_INTEG_FLAG = 32; -Kerberos.GSS_C_ANON_FLAG = 64; -Kerberos.GSS_C_PROT_READY_FLAG = 128; -Kerberos.GSS_C_TRANS_FLAG = 256; - -// Export Kerberos class -exports.Kerberos = Kerberos; - -// If we have SSPI (windows) -if(kerberos.SecurityCredentials) { - // Put all SSPI classes in it's own namespace - exports.SSIP = { - SecurityCredentials: require('./win32/wrappers/security_credentials').SecurityCredentials - , SecurityContext: require('./win32/wrappers/security_context').SecurityContext - , SecurityBuffer: require('./win32/wrappers/security_buffer').SecurityBuffer - , SecurityBufferDescriptor: require('./win32/wrappers/security_buffer_descriptor').SecurityBufferDescriptor - } -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.cc deleted file mode 100644 index 7a5f4eb8d..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.cc +++ /dev/null @@ -1,74 +0,0 @@ -#include "kerberos_context.h" - -Persistent KerberosContext::constructor_template; - -KerberosContext::KerberosContext() : ObjectWrap() { -} - -KerberosContext::~KerberosContext() { -} - -KerberosContext* KerberosContext::New() { - HandleScope scope; - - Local obj = constructor_template->GetFunction()->NewInstance(); - KerberosContext *kerberos_context = ObjectWrap::Unwrap(obj); - - return kerberos_context; -} - -Handle KerberosContext::New(const Arguments &args) { - HandleScope scope; - // Create code object - KerberosContext *kerberos_context = new KerberosContext(); - // Wrap it - kerberos_context->Wrap(args.This()); - // Return the object - return args.This(); -} - -static Persistent response_symbol; - -void KerberosContext::Initialize(Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("KerberosContext")); - - // Property symbols - response_symbol = NODE_PSYMBOL("response"); - - // Getter for the response - 
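The exported result codes and GSS flags above are how callers drive the handshake: authGSSClientStep() reports AUTH_GSS_CONTINUE (0) while more round trips are needed and AUTH_GSS_COMPLETE (1) once the context is established, with context.response holding the token to send each time. A hedged sketch of that loop; exchangeWithServer(token, cb) is a hypothetical transport helper, not part of the module:

    // Sketch of the step loop implied by AUTH_GSS_CONTINUE / AUTH_GSS_COMPLETE.
    var Kerberos = require('kerberos').Kerberos;

    function runSteps(kerberos, context, challenge, done) {
      kerberos.authGSSClientStep(context, challenge, function(err, code) {
        if (err) return done(err);
        if (code === Kerberos.AUTH_GSS_COMPLETE) return done(null, context.response);
        // Still AUTH_GSS_CONTINUE: ship context.response to the server and feed
        // its reply back into the next step.
        exchangeWithServer(context.response, function(err, nextChallenge) {
          if (err) return done(err);
          runSteps(kerberos, context, nextChallenge, done);
        });
      });
    }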
constructor_template->InstanceTemplate()->SetAccessor(response_symbol, ResponseGetter); - - // Set up the Symbol for the Class on the Module - target->Set(String::NewSymbol("KerberosContext"), constructor_template->GetFunction()); -} - -// -// Response Setter / Getter -Handle KerberosContext::ResponseGetter(Local property, const AccessorInfo& info) { - HandleScope scope; - gss_client_state *state; - - // Unpack the object - KerberosContext *context = ObjectWrap::Unwrap(info.Holder()); - // Let's grab the response - state = context->state; - // No state no response - if(state == NULL || state->response == NULL) return scope.Close(Null()); - // Return the response - return scope.Close(String::New(state->response)); -} - - - - - - - - - diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.h deleted file mode 100644 index 8becef6d3..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberos_context.h +++ /dev/null @@ -1,48 +0,0 @@ -#ifndef KERBEROS_CONTEXT_H -#define KERBEROS_CONTEXT_H - -#include -#include -#include -#include - -#include -#include - -extern "C" { - #include "kerberosgss.h" -} - -using namespace v8; -using namespace node; - -class KerberosContext : public ObjectWrap { - -public: - KerberosContext(); - ~KerberosContext(); - - static inline bool HasInstance(Handle val) { - if (!val->IsObject()) return false; - Local obj = val->ToObject(); - return constructor_template->HasInstance(obj); - }; - - // Constructor used for creating new Kerberos objects from C++ - static Persistent constructor_template; - - // Initialize function for the object - static void Initialize(Handle target); - - // Public constructor - static KerberosContext* New(); - - // Handle to the kerberos context - gss_client_state *state; - -private: - static Handle New(const Arguments &args); - - static Handle ResponseGetter(Local property, const AccessorInfo& info); -}; -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.c b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.c deleted file mode 100644 index f17003db3..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.c +++ /dev/null @@ -1,666 +0,0 @@ -/** - * Copyright (c) 2006-2010 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- **/ - -#include "kerberosgss.h" - -#include "base64.h" - -#include -#include -#include -#include - -static void set_gss_error(OM_uint32 err_maj, OM_uint32 err_min); - -/*extern PyObject *GssException_class; -extern PyObject *KrbException_class; - -char* server_principal_details(const char* service, const char* hostname) -{ - char match[1024]; - int match_len = 0; - char* result = NULL; - - int code; - krb5_context kcontext; - krb5_keytab kt = NULL; - krb5_kt_cursor cursor = NULL; - krb5_keytab_entry entry; - char* pname = NULL; - - // Generate the principal prefix we want to match - snprintf(match, 1024, "%s/%s@", service, hostname); - match_len = strlen(match); - - code = krb5_init_context(&kcontext); - if (code) - { - PyErr_SetObject(KrbException_class, Py_BuildValue("((s:i))", - "Cannot initialize Kerberos5 context", code)); - return NULL; - } - - if ((code = krb5_kt_default(kcontext, &kt))) - { - PyErr_SetObject(KrbException_class, Py_BuildValue("((s:i))", - "Cannot get default keytab", code)); - goto end; - } - - if ((code = krb5_kt_start_seq_get(kcontext, kt, &cursor))) - { - PyErr_SetObject(KrbException_class, Py_BuildValue("((s:i))", - "Cannot get sequence cursor from keytab", code)); - goto end; - } - - while ((code = krb5_kt_next_entry(kcontext, kt, &entry, &cursor)) == 0) - { - if ((code = krb5_unparse_name(kcontext, entry.principal, &pname))) - { - PyErr_SetObject(KrbException_class, Py_BuildValue("((s:i))", - "Cannot parse principal name from keytab", code)); - goto end; - } - - if (strncmp(pname, match, match_len) == 0) - { - result = malloc(strlen(pname) + 1); - strcpy(result, pname); - krb5_free_unparsed_name(kcontext, pname); - krb5_free_keytab_entry_contents(kcontext, &entry); - break; - } - - krb5_free_unparsed_name(kcontext, pname); - krb5_free_keytab_entry_contents(kcontext, &entry); - } - - if (result == NULL) - { - PyErr_SetObject(KrbException_class, Py_BuildValue("((s:i))", - "Principal not found in keytab", -1)); - } - -end: - if (cursor) - krb5_kt_end_seq_get(kcontext, kt, &cursor); - if (kt) - krb5_kt_close(kcontext, kt); - krb5_free_context(kcontext); - - return result; -} -*/ -gss_client_response *authenticate_gss_client_init(const char* service, long int gss_flags, gss_client_state* state) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc name_token = GSS_C_EMPTY_BUFFER; - gss_client_response *response = NULL; - int ret = AUTH_GSS_COMPLETE; - - state->server_name = GSS_C_NO_NAME; - state->context = GSS_C_NO_CONTEXT; - state->gss_flags = gss_flags; - state->username = NULL; - state->response = NULL; - - // Import server name first - name_token.length = strlen(service); - name_token.value = (char *)service; - - maj_stat = gss_import_name(&min_stat, &name_token, gss_krb5_nt_service_name, &state->server_name); - - if (GSS_ERROR(maj_stat)) { - response = gss_error(maj_stat, min_stat); - response->return_code = AUTH_GSS_ERROR; - goto end; - } - -end: - if(response == NULL) { - response = calloc(1, sizeof(gss_client_response)); - response->return_code = ret; - } - - return response; -} - -gss_client_response *authenticate_gss_client_clean(gss_client_state *state) { - OM_uint32 min_stat; - int ret = AUTH_GSS_COMPLETE; - gss_client_response *response = NULL; - - if(state->context != GSS_C_NO_CONTEXT) - gss_delete_sec_context(&min_stat, &state->context, GSS_C_NO_BUFFER); - - if(state->server_name != GSS_C_NO_NAME) - gss_release_name(&min_stat, &state->server_name); - - if(state->username != NULL) { - free(state->username); - state->username = NULL; - } - 
- if (state->response != NULL) { - free(state->response); - state->response = NULL; - } - - if(response == NULL) { - response = calloc(1, sizeof(gss_client_response)); - response->return_code = ret; - } - - return response; -} - -gss_client_response *authenticate_gss_client_step(gss_client_state* state, const char* challenge) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_CONTINUE; - gss_client_response *response = NULL; - - // Always clear out the old response - if (state->response != NULL) { - free(state->response); - state->response = NULL; - } - - // If there is a challenge (data from the server) we need to give it to GSS - if (challenge && *challenge) { - int len; - input_token.value = base64_decode(challenge, &len); - input_token.length = len; - } - - // Do GSSAPI step - maj_stat = gss_init_sec_context(&min_stat, - GSS_C_NO_CREDENTIAL, - &state->context, - state->server_name, - GSS_C_NO_OID, - (OM_uint32)state->gss_flags, - 0, - GSS_C_NO_CHANNEL_BINDINGS, - &input_token, - NULL, - &output_token, - NULL, - NULL); - - if ((maj_stat != GSS_S_COMPLETE) && (maj_stat != GSS_S_CONTINUE_NEEDED)) { - response = gss_error(maj_stat, min_stat); - response->return_code = AUTH_GSS_ERROR; - goto end; - } - - ret = (maj_stat == GSS_S_COMPLETE) ? AUTH_GSS_COMPLETE : AUTH_GSS_CONTINUE; - // Grab the client response to send back to the server - if(output_token.length) { - state->response = base64_encode((const unsigned char *)output_token.value, output_token.length); - maj_stat = gss_release_buffer(&min_stat, &output_token); - } - - // Try to get the user name if we have completed all GSS operations - if (ret == AUTH_GSS_COMPLETE) { - gss_name_t gssuser = GSS_C_NO_NAME; - maj_stat = gss_inquire_context(&min_stat, state->context, &gssuser, NULL, NULL, NULL, NULL, NULL, NULL); - - if(GSS_ERROR(maj_stat)) { - response = gss_error(maj_stat, min_stat); - response->return_code = AUTH_GSS_ERROR; - goto end; - } - - gss_buffer_desc name_token; - name_token.length = 0; - maj_stat = gss_display_name(&min_stat, gssuser, &name_token, NULL); - - if(GSS_ERROR(maj_stat)) { - if(name_token.value) - gss_release_buffer(&min_stat, &name_token); - gss_release_name(&min_stat, &gssuser); - - response = gss_error(maj_stat, min_stat); - response->return_code = AUTH_GSS_ERROR; - goto end; - } else { - state->username = (char *)malloc(name_token.length + 1); - strncpy(state->username, (char*) name_token.value, name_token.length); - state->username[name_token.length] = 0; - gss_release_buffer(&min_stat, &name_token); - gss_release_name(&min_stat, &gssuser); - } - } - -end: - if(output_token.value) - gss_release_buffer(&min_stat, &output_token); - if(input_token.value) - free(input_token.value); - - if(response == NULL) { - response = calloc(1, sizeof(gss_client_response)); - response->return_code = ret; - } - - // Return the response - return response; -} - -gss_client_response *authenticate_gss_client_unwrap(gss_client_state *state, const char *challenge) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - gss_client_response *response = NULL; - int ret = AUTH_GSS_CONTINUE; - - // Always clear out the old response - if(state->response != NULL) { - free(state->response); - state->response = NULL; - } - - // If there is a challenge (data from the server) we need to give it to GSS - if(challenge && 
*challenge) { - int len; - input_token.value = base64_decode(challenge, &len); - input_token.length = len; - } - - // Do GSSAPI step - maj_stat = gss_unwrap(&min_stat, - state->context, - &input_token, - &output_token, - NULL, - NULL); - - if(maj_stat != GSS_S_COMPLETE) { - response = gss_error(maj_stat, min_stat); - response->return_code = AUTH_GSS_ERROR; - goto end; - } else { - ret = AUTH_GSS_COMPLETE; - } - - // Grab the client response - if(output_token.length) { - state->response = base64_encode((const unsigned char *)output_token.value, output_token.length); - maj_stat = gss_release_buffer(&min_stat, &output_token); - } -end: - if(output_token.value) - gss_release_buffer(&min_stat, &output_token); - if(input_token.value) - free(input_token.value); - - if(response == NULL) { - response = calloc(1, sizeof(gss_client_response)); - response->return_code = ret; - } - - // Return the response - return response; -} - -gss_client_response *authenticate_gss_client_wrap(gss_client_state* state, const char* challenge, const char* user) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_CONTINUE; - gss_client_response *response = NULL; - char buf[4096], server_conf_flags; - unsigned long buf_size; - - // Always clear out the old response - if(state->response != NULL) { - free(state->response); - state->response = NULL; - } - - if(challenge && *challenge) { - int len; - input_token.value = base64_decode(challenge, &len); - input_token.length = len; - } - - if(user) { - // get bufsize - server_conf_flags = ((char*) input_token.value)[0]; - ((char*) input_token.value)[0] = 0; - buf_size = ntohl(*((long *) input_token.value)); - free(input_token.value); -#ifdef PRINTFS - printf("User: %s, %c%c%c\n", user, - server_conf_flags & GSS_AUTH_P_NONE ? 'N' : '-', - server_conf_flags & GSS_AUTH_P_INTEGRITY ? 'I' : '-', - server_conf_flags & GSS_AUTH_P_PRIVACY ? 'P' : '-'); - printf("Maximum GSS token size is %ld\n", buf_size); -#endif - - // agree to terms (hack!) 
- buf_size = htonl(buf_size); // not relevant without integrity/privacy - memcpy(buf, &buf_size, 4); - buf[0] = GSS_AUTH_P_NONE; - // server decides if principal can log in as user - strncpy(buf + 4, user, sizeof(buf) - 4); - input_token.value = buf; - input_token.length = 4 + strlen(user); - } - - // Do GSSAPI wrap - maj_stat = gss_wrap(&min_stat, - state->context, - 0, - GSS_C_QOP_DEFAULT, - &input_token, - NULL, - &output_token); - - if (maj_stat != GSS_S_COMPLETE) { - response = gss_error(maj_stat, min_stat); - response->return_code = AUTH_GSS_ERROR; - goto end; - } else - ret = AUTH_GSS_COMPLETE; - // Grab the client response to send back to the server - if (output_token.length) { - state->response = base64_encode((const unsigned char *)output_token.value, output_token.length);; - maj_stat = gss_release_buffer(&min_stat, &output_token); - } -end: - if (output_token.value) - gss_release_buffer(&min_stat, &output_token); - - if(response == NULL) { - response = calloc(1, sizeof(gss_client_response)); - response->return_code = ret; - } - - // Return the response - return response; -} - -int authenticate_gss_server_init(const char *service, gss_server_state *state) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc name_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_COMPLETE; - - state->context = GSS_C_NO_CONTEXT; - state->server_name = GSS_C_NO_NAME; - state->client_name = GSS_C_NO_NAME; - state->server_creds = GSS_C_NO_CREDENTIAL; - state->client_creds = GSS_C_NO_CREDENTIAL; - state->username = NULL; - state->targetname = NULL; - state->response = NULL; - - // Server name may be empty which means we aren't going to create our own creds - size_t service_len = strlen(service); - if (service_len != 0) - { - // Import server name first - name_token.length = strlen(service); - name_token.value = (char *)service; - - maj_stat = gss_import_name(&min_stat, &name_token, GSS_C_NT_HOSTBASED_SERVICE, &state->server_name); - - if (GSS_ERROR(maj_stat)) - { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - // Get credentials - maj_stat = gss_acquire_cred(&min_stat, state->server_name, GSS_C_INDEFINITE, - GSS_C_NO_OID_SET, GSS_C_ACCEPT, &state->server_creds, NULL, NULL); - - if (GSS_ERROR(maj_stat)) - { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - } - -end: - return ret; -} - -int authenticate_gss_server_clean(gss_server_state *state) -{ - OM_uint32 min_stat; - int ret = AUTH_GSS_COMPLETE; - - if (state->context != GSS_C_NO_CONTEXT) - gss_delete_sec_context(&min_stat, &state->context, GSS_C_NO_BUFFER); - if (state->server_name != GSS_C_NO_NAME) - gss_release_name(&min_stat, &state->server_name); - if (state->client_name != GSS_C_NO_NAME) - gss_release_name(&min_stat, &state->client_name); - if (state->server_creds != GSS_C_NO_CREDENTIAL) - gss_release_cred(&min_stat, &state->server_creds); - if (state->client_creds != GSS_C_NO_CREDENTIAL) - gss_release_cred(&min_stat, &state->client_creds); - if (state->username != NULL) - { - free(state->username); - state->username = NULL; - } - if (state->targetname != NULL) - { - free(state->targetname); - state->targetname = NULL; - } - if (state->response != NULL) - { - free(state->response); - state->response = NULL; - } - - return ret; -} - -/*int authenticate_gss_server_step(gss_server_state *state, const char *challenge) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret 
= AUTH_GSS_CONTINUE; - - // Always clear out the old response - if (state->response != NULL) - { - free(state->response); - state->response = NULL; - } - - // If there is a challenge (data from the server) we need to give it to GSS - if (challenge && *challenge) - { - int len; - input_token.value = base64_decode(challenge, &len); - input_token.length = len; - } - else - { - PyErr_SetString(KrbException_class, "No challenge parameter in request from client"); - ret = AUTH_GSS_ERROR; - goto end; - } - - maj_stat = gss_accept_sec_context(&min_stat, - &state->context, - state->server_creds, - &input_token, - GSS_C_NO_CHANNEL_BINDINGS, - &state->client_name, - NULL, - &output_token, - NULL, - NULL, - &state->client_creds); - - if (GSS_ERROR(maj_stat)) - { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - // Grab the server response to send back to the client - if (output_token.length) - { - state->response = base64_encode((const unsigned char *)output_token.value, output_token.length);; - maj_stat = gss_release_buffer(&min_stat, &output_token); - } - - // Get the user name - maj_stat = gss_display_name(&min_stat, state->client_name, &output_token, NULL); - if (GSS_ERROR(maj_stat)) - { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - state->username = (char *)malloc(output_token.length + 1); - strncpy(state->username, (char*) output_token.value, output_token.length); - state->username[output_token.length] = 0; - - // Get the target name if no server creds were supplied - if (state->server_creds == GSS_C_NO_CREDENTIAL) - { - gss_name_t target_name = GSS_C_NO_NAME; - maj_stat = gss_inquire_context(&min_stat, state->context, NULL, &target_name, NULL, NULL, NULL, NULL, NULL); - if (GSS_ERROR(maj_stat)) - { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - maj_stat = gss_display_name(&min_stat, target_name, &output_token, NULL); - if (GSS_ERROR(maj_stat)) - { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - state->targetname = (char *)malloc(output_token.length + 1); - strncpy(state->targetname, (char*) output_token.value, output_token.length); - state->targetname[output_token.length] = 0; - } - - ret = AUTH_GSS_COMPLETE; - -end: - if (output_token.length) - gss_release_buffer(&min_stat, &output_token); - if (input_token.value) - free(input_token.value); - return ret; -} -*/ - -static void set_gss_error(OM_uint32 err_maj, OM_uint32 err_min) { - OM_uint32 maj_stat, min_stat; - OM_uint32 msg_ctx = 0; - gss_buffer_desc status_string; - char buf_maj[512]; - char buf_min[512]; - - do { - maj_stat = gss_display_status (&min_stat, - err_maj, - GSS_C_GSS_CODE, - GSS_C_NO_OID, - &msg_ctx, - &status_string); - if(GSS_ERROR(maj_stat)) - break; - - strncpy(buf_maj, (char*) status_string.value, sizeof(buf_maj)); - gss_release_buffer(&min_stat, &status_string); - - maj_stat = gss_display_status (&min_stat, - err_min, - GSS_C_MECH_CODE, - GSS_C_NULL_OID, - &msg_ctx, - &status_string); - if (!GSS_ERROR(maj_stat)) { - - strncpy(buf_min, (char*) status_string.value , sizeof(buf_min)); - gss_release_buffer(&min_stat, &status_string); - } - } while (!GSS_ERROR(maj_stat) && msg_ctx != 0); -} - -gss_client_response *gss_error(OM_uint32 err_maj, OM_uint32 err_min) { - OM_uint32 maj_stat, min_stat; - OM_uint32 msg_ctx = 0; - gss_buffer_desc status_string; - char *buf_maj = calloc(512, sizeof(char)); - char *buf_min = calloc(512, sizeof(char)); - char *message = NULL; - gss_client_response *response = 
calloc(1, sizeof(gss_client_response)); - - do { - maj_stat = gss_display_status (&min_stat, - err_maj, - GSS_C_GSS_CODE, - GSS_C_NO_OID, - &msg_ctx, - &status_string); - if(GSS_ERROR(maj_stat)) - break; - - strncpy(buf_maj, (char*) status_string.value, 512); - gss_release_buffer(&min_stat, &status_string); - - maj_stat = gss_display_status (&min_stat, - err_min, - GSS_C_MECH_CODE, - GSS_C_NULL_OID, - &msg_ctx, - &status_string); - if(!GSS_ERROR(maj_stat)) { - strncpy(buf_min, (char*) status_string.value , 512); - gss_release_buffer(&min_stat, &status_string); - } - } while (!GSS_ERROR(maj_stat) && msg_ctx != 0); - - // Join the strings - message = calloc(1026, 1); - // Join the two messages - sprintf(message, "%s, %s", buf_maj, buf_min); - // Free data - free(buf_min); - free(buf_maj); - // Set the message - response->message = message; - // Return the message - return response; -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.h deleted file mode 100644 index 58ac0b714..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/kerberosgss.h +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Copyright (c) 2006-2009 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- **/ -#ifndef KERBEROS_GSS_H -#define KERBEROS_GSS_H - -#include -#include -#include - -#define krb5_get_err_text(context,code) error_message(code) - -#define AUTH_GSS_ERROR -1 -#define AUTH_GSS_COMPLETE 1 -#define AUTH_GSS_CONTINUE 0 - -#define GSS_AUTH_P_NONE 1 -#define GSS_AUTH_P_INTEGRITY 2 -#define GSS_AUTH_P_PRIVACY 4 - -typedef struct { - int return_code; - char *message; -} gss_client_response; - -typedef struct { - gss_ctx_id_t context; - gss_name_t server_name; - long int gss_flags; - char* username; - char* response; -} gss_client_state; - -typedef struct { - gss_ctx_id_t context; - gss_name_t server_name; - gss_name_t client_name; - gss_cred_id_t server_creds; - gss_cred_id_t client_creds; - char* username; - char* targetname; - char* response; -} gss_server_state; - -// char* server_principal_details(const char* service, const char* hostname); - -gss_client_response *authenticate_gss_client_init(const char* service, long int gss_flags, gss_client_state* state); -gss_client_response *authenticate_gss_client_clean(gss_client_state *state); -gss_client_response *authenticate_gss_client_step(gss_client_state *state, const char *challenge); -gss_client_response *authenticate_gss_client_unwrap(gss_client_state* state, const char* challenge); -gss_client_response *authenticate_gss_client_wrap(gss_client_state* state, const char* challenge, const char* user); - -int authenticate_gss_server_init(const char* service, gss_server_state* state); -int authenticate_gss_server_clean(gss_server_state *state); -// int authenticate_gss_server_step(gss_server_state *state, const char *challenge); - -gss_client_response *gss_error(OM_uint32 err_maj, OM_uint32 err_min); -#endif diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/sspi.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/sspi.js deleted file mode 100644 index d9120fba3..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/sspi.js +++ /dev/null @@ -1,15 +0,0 @@ -// Load the native SSPI classes -var kerberos = require('../build/Release/kerberos') - , Kerberos = kerberos.Kerberos - , SecurityBuffer = require('./win32/wrappers/security_buffer').SecurityBuffer - , SecurityBufferDescriptor = require('./win32/wrappers/security_buffer_descriptor').SecurityBufferDescriptor - , SecurityCredentials = require('./win32/wrappers/security_credentials').SecurityCredentials - , SecurityContext = require('./win32/wrappers/security_context').SecurityContext; -var SSPI = function() { -} - -exports.SSPI = SSPI; -exports.SecurityBuffer = SecurityBuffer; -exports.SecurityBufferDescriptor = SecurityBufferDescriptor; -exports.SecurityCredentials = SecurityCredentials; -exports.SecurityContext = SecurityContext; \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.c b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.c deleted file mode 100644 index 502a021c8..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.c +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Copyright (c) 2006-2008 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -#include "base64.h" - -#include -#include - -// base64 tables -static char basis_64[] = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; -static signed char index_64[128] = -{ - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63, - 52,53,54,55, 56,57,58,59, 60,61,-1,-1, -1,-1,-1,-1, - -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14, - 15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1, - -1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40, - 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1 -}; -#define CHAR64(c) (((c) < 0 || (c) > 127) ? -1 : index_64[(c)]) - -// base64_encode : base64 encode -// -// value : data to encode -// vlen : length of data -// (result) : new char[] - c-str of result -char *base64_encode(const unsigned char *value, int vlen) -{ - char *result = (char *)malloc((vlen * 4) / 3 + 5); - char *out = result; - unsigned char oval; - - while (vlen >= 3) - { - *out++ = basis_64[value[0] >> 2]; - *out++ = basis_64[((value[0] << 4) & 0x30) | (value[1] >> 4)]; - *out++ = basis_64[((value[1] << 2) & 0x3C) | (value[2] >> 6)]; - *out++ = basis_64[value[2] & 0x3F]; - value += 3; - vlen -= 3; - } - if (vlen > 0) - { - *out++ = basis_64[value[0] >> 2]; - oval = (value[0] << 4) & 0x30; - if (vlen > 1) oval |= value[1] >> 4; - *out++ = basis_64[oval]; - *out++ = (vlen < 2) ? '=' : basis_64[(value[1] << 2) & 0x3C]; - *out++ = '='; - } - *out = '\0'; - - return result; -} - -// base64_decode : base64 decode -// -// value : c-str to decode -// rlen : length of decoded result -// (result) : new unsigned char[] - decoded result -unsigned char *base64_decode(const char *value, int *rlen) -{ - int c1, c2, c3, c4; - int vlen = (int)strlen(value); - unsigned char *result =(unsigned char *)malloc((vlen * 3) / 4 + 1); - unsigned char *out = result; - *rlen = 0; - - while (1) - { - if (value[0]==0) - return result; - c1 = value[0]; - if (CHAR64(c1) == -1) - goto base64_decode_error;; - c2 = value[1]; - if (CHAR64(c2) == -1) - goto base64_decode_error;; - c3 = value[2]; - if ((c3 != '=') && (CHAR64(c3) == -1)) - goto base64_decode_error;; - c4 = value[3]; - if ((c4 != '=') && (CHAR64(c4) == -1)) - goto base64_decode_error;; - - value += 4; - *out++ = (CHAR64(c1) << 2) | (CHAR64(c2) >> 4); - *rlen += 1; - if (c3 != '=') - { - *out++ = ((CHAR64(c2) << 4) & 0xf0) | (CHAR64(c3) >> 2); - *rlen += 1; - if (c4 != '=') - { - *out++ = ((CHAR64(c3) << 6) & 0xc0) | CHAR64(c4); - *rlen += 1; - } - } - } - -base64_decode_error: - *result = 0; - *rlen = 0; - return result; -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.h deleted file mode 100644 index f0e1f0616..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/base64.h +++ /dev/null @@ -1,18 +0,0 @@ -/** - * Copyright (c) 2006-2008 Apple Inc. All rights reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -char *base64_encode(const unsigned char *value, int vlen); -unsigned char *base64_decode(const char *value, int *rlen); diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.cc deleted file mode 100644 index 7fd521b8a..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.cc +++ /dev/null @@ -1,53 +0,0 @@ -#include "kerberos.h" -#include -#include -#include "base64.h" -#include "wrappers/security_buffer.h" -#include "wrappers/security_buffer_descriptor.h" -#include "wrappers/security_context.h" -#include "wrappers/security_credentials.h" - -Persistent Kerberos::constructor_template; - -// VException object (causes throw in calling code) -static Handle VException(const char *msg) { - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -} - -Kerberos::Kerberos() : ObjectWrap() { -} - -void Kerberos::Initialize(v8::Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(Kerberos::New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("Kerberos")); - // Set the symbol - target->ForceSet(String::NewSymbol("Kerberos"), constructor_template->GetFunction()); -} - -Handle Kerberos::New(const Arguments &args) { - // Load the security.dll library - load_library(); - // Create a Kerberos instance - Kerberos *kerberos = new Kerberos(); - // Return the kerberos object - kerberos->Wrap(args.This()); - return args.This(); -} - -// Exporting function -extern "C" void init(Handle target) { - HandleScope scope; - Kerberos::Initialize(target); - SecurityContext::Initialize(target); - SecurityBuffer::Initialize(target); - SecurityBufferDescriptor::Initialize(target); - SecurityCredentials::Initialize(target); -} - -NODE_MODULE(kerberos, init); diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.h deleted file mode 100644 index 8443e78ab..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos.h +++ /dev/null @@ -1,59 +0,0 @@ -#ifndef KERBEROS_H -#define KERBEROS_H - -#include -#include -#include - -extern "C" { - #include "kerberos_sspi.h" - #include "base64.h" -} - -using namespace v8; -using namespace node; - -class Kerberos : public ObjectWrap { - -public: - Kerberos(); - ~Kerberos() {}; - - // Constructor used for creating new Kerberos objects from C++ - static Persistent constructor_template; - - // Initialize function for the object - 
static void Initialize(Handle target); - - // Method available - static Handle AcquireAlternateCredentials(const Arguments &args); - static Handle PrepareOutboundPackage(const Arguments &args); - static Handle DecryptMessage(const Arguments &args); - static Handle EncryptMessage(const Arguments &args); - static Handle QueryContextAttributes(const Arguments &args); - -private: - static Handle New(const Arguments &args); - - // Pointer to context object - SEC_WINNT_AUTH_IDENTITY m_Identity; - // credentials - CredHandle m_Credentials; - // Expiry time for ticket - TimeStamp Expiration; - // package info - SecPkgInfo m_PkgInfo; - // context - CtxtHandle m_Context; - // Do we have a context - bool m_HaveContext; - // Attributes - DWORD CtxtAttr; - - // Handles the uv calls - static void Process(uv_work_t* work_req); - // Called after work is done - static void After(uv_work_t* work_req); -}; - -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.c b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.c deleted file mode 100644 index d75c9ab0a..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.c +++ /dev/null @@ -1,244 +0,0 @@ -#include "kerberos_sspi.h" -#include -#include - -static HINSTANCE _sspi_security_dll = NULL; -static HINSTANCE _sspi_secur32_dll = NULL; - -/** - * Encrypt A Message - */ -SECURITY_STATUS SEC_ENTRY _sspi_EncryptMessage(PCtxtHandle phContext, unsigned long fQOP, PSecBufferDesc pMessage, unsigned long MessageSeqNo) { - // Create function pointer instance - encryptMessage_fn pfn_encryptMessage = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return -1; - - // Map function to library method - pfn_encryptMessage = (encryptMessage_fn)GetProcAddress(_sspi_security_dll, "EncryptMessage"); - // Check if the we managed to map function pointer - if(!pfn_encryptMessage) { - printf("GetProcAddress failed.\n"); - return -2; - } - - // Call the function - return (*pfn_encryptMessage)(phContext, fQOP, pMessage, MessageSeqNo); -} - -/** - * Acquire Credentials - */ -SECURITY_STATUS SEC_ENTRY _sspi_AcquireCredentialsHandle( - LPSTR pszPrincipal, LPSTR pszPackage, unsigned long fCredentialUse, - void * pvLogonId, void * pAuthData, SEC_GET_KEY_FN pGetKeyFn, void * pvGetKeyArgument, - PCredHandle phCredential, PTimeStamp ptsExpiry -) { - SECURITY_STATUS status; - // Create function pointer instance - acquireCredentialsHandle_fn pfn_acquireCredentialsHandle = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return -1; - - // Map function - #ifdef _UNICODE - pfn_acquireCredentialsHandle = (acquireCredentialsHandle_fn)GetProcAddress(_sspi_security_dll, "AcquireCredentialsHandleW"); - #else - pfn_acquireCredentialsHandle = (acquireCredentialsHandle_fn)GetProcAddress(_sspi_security_dll, "AcquireCredentialsHandleA"); - #endif - - // Check if the we managed to map function pointer - if(!pfn_acquireCredentialsHandle) { - printf("GetProcAddress failed.\n"); - return -2; - } - - // Status - status = (*pfn_acquireCredentialsHandle)(pszPrincipal, pszPackage, fCredentialUse, - pvLogonId, pAuthData, pGetKeyFn, pvGetKeyArgument, phCredential, ptsExpiry - ); - - // Call the function - return status; -} - -/** - * Delete Security Context - */ -SECURITY_STATUS SEC_ENTRY _sspi_DeleteSecurityContext(PCtxtHandle 
phContext) { - // Create function pointer instance - deleteSecurityContext_fn pfn_deleteSecurityContext = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return -1; - // Map function - pfn_deleteSecurityContext = (deleteSecurityContext_fn)GetProcAddress(_sspi_security_dll, "DeleteSecurityContext"); - - // Check if the we managed to map function pointer - if(!pfn_deleteSecurityContext) { - printf("GetProcAddress failed.\n"); - return -2; - } - - // Call the function - return (*pfn_deleteSecurityContext)(phContext); -} - -/** - * Decrypt Message - */ -SECURITY_STATUS SEC_ENTRY _sspi_DecryptMessage(PCtxtHandle phContext, PSecBufferDesc pMessage, unsigned long MessageSeqNo, unsigned long pfQOP) { - // Create function pointer instance - decryptMessage_fn pfn_decryptMessage = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return -1; - // Map function - pfn_decryptMessage = (decryptMessage_fn)GetProcAddress(_sspi_security_dll, "DecryptMessage"); - - // Check if the we managed to map function pointer - if(!pfn_decryptMessage) { - printf("GetProcAddress failed.\n"); - return -2; - } - - // Call the function - return (*pfn_decryptMessage)(phContext, pMessage, MessageSeqNo, pfQOP); -} - -/** - * Initialize Security Context - */ -SECURITY_STATUS SEC_ENTRY _sspi_initializeSecurityContext( - PCredHandle phCredential, PCtxtHandle phContext, - LPSTR pszTargetName, unsigned long fContextReq, - unsigned long Reserved1, unsigned long TargetDataRep, - PSecBufferDesc pInput, unsigned long Reserved2, - PCtxtHandle phNewContext, PSecBufferDesc pOutput, - unsigned long * pfContextAttr, PTimeStamp ptsExpiry -) { - SECURITY_STATUS status; - // Create function pointer instance - initializeSecurityContext_fn pfn_initializeSecurityContext = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return -1; - - // Map function - #ifdef _UNICODE - pfn_initializeSecurityContext = (initializeSecurityContext_fn)GetProcAddress(_sspi_security_dll, "InitializeSecurityContextW"); - #else - pfn_initializeSecurityContext = (initializeSecurityContext_fn)GetProcAddress(_sspi_security_dll, "InitializeSecurityContextA"); - #endif - - // Check if the we managed to map function pointer - if(!pfn_initializeSecurityContext) { - printf("GetProcAddress failed.\n"); - return -2; - } - - // Execute intialize context - status = (*pfn_initializeSecurityContext)( - phCredential, phContext, pszTargetName, fContextReq, - Reserved1, TargetDataRep, pInput, Reserved2, - phNewContext, pOutput, pfContextAttr, ptsExpiry - ); - - // Call the function - return status; -} -/** - * Query Context Attributes - */ -SECURITY_STATUS SEC_ENTRY _sspi_QueryContextAttributes( - PCtxtHandle phContext, unsigned long ulAttribute, void * pBuffer -) { - // Create function pointer instance - queryContextAttributes_fn pfn_queryContextAttributes = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return -1; - - #ifdef _UNICODE - pfn_queryContextAttributes = (queryContextAttributes_fn)GetProcAddress(_sspi_security_dll, "QueryContextAttributesW"); - #else - pfn_queryContextAttributes = (queryContextAttributes_fn)GetProcAddress(_sspi_security_dll, "QueryContextAttributesA"); - #endif - - // Check if the we managed to map function pointer - if(!pfn_queryContextAttributes) { - printf("GetProcAddress failed.\n"); - return -2; - } - - // Call the function - return (*pfn_queryContextAttributes)( - phContext, ulAttribute, pBuffer - ); -} - -/** - * 
InitSecurityInterface - */ -PSecurityFunctionTable _ssip_InitSecurityInterface() { - INIT_SECURITY_INTERFACE InitSecurityInterface; - PSecurityFunctionTable pSecurityInterface = NULL; - - // Return error if library not loaded - if(_sspi_security_dll == NULL) return NULL; - - #ifdef _UNICODE - // Get the address of the InitSecurityInterface function. - InitSecurityInterface = (INIT_SECURITY_INTERFACE) GetProcAddress ( - _sspi_secur32_dll, - TEXT("InitSecurityInterfaceW")); - #else - // Get the address of the InitSecurityInterface function. - InitSecurityInterface = (INIT_SECURITY_INTERFACE) GetProcAddress ( - _sspi_secur32_dll, - TEXT("InitSecurityInterfaceA")); - #endif - - if(!InitSecurityInterface) { - printf (TEXT("Failed in getting the function address, Error: %x"), GetLastError ()); - return NULL; - } - - // Use InitSecurityInterface to get the function table. - pSecurityInterface = (*InitSecurityInterface)(); - - if(!pSecurityInterface) { - printf (TEXT("Failed in getting the function table, Error: %x"), GetLastError ()); - return NULL; - } - - return pSecurityInterface; -} - -/** - * Load security.dll dynamically - */ -int load_library() { - DWORD err; - // Load the library - _sspi_security_dll = LoadLibrary("security.dll"); - - // Check if the library loaded - if(_sspi_security_dll == NULL) { - err = GetLastError(); - return err; - } - - // Load the library - _sspi_secur32_dll = LoadLibrary("secur32.dll"); - - // Check if the library loaded - if(_sspi_secur32_dll == NULL) { - err = GetLastError(); - return err; - } - - return 0; -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.h deleted file mode 100644 index a3008dc53..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/kerberos_sspi.h +++ /dev/null @@ -1,106 +0,0 @@ -#ifndef SSPI_C_H -#define SSPI_C_H - -#define SECURITY_WIN32 1 - -#include -#include - -/** - * Encrypt A Message - */ -SECURITY_STATUS SEC_ENTRY _sspi_EncryptMessage(PCtxtHandle phContext, unsigned long fQOP, PSecBufferDesc pMessage, unsigned long MessageSeqNo); - -typedef DWORD (WINAPI *encryptMessage_fn)(PCtxtHandle phContext, ULONG fQOP, PSecBufferDesc pMessage, ULONG MessageSeqNo); - -/** - * Acquire Credentials - */ -SECURITY_STATUS SEC_ENTRY _sspi_AcquireCredentialsHandle( - LPSTR pszPrincipal, // Name of principal - LPSTR pszPackage, // Name of package - unsigned long fCredentialUse, // Flags indicating use - void * pvLogonId, // Pointer to logon ID - void * pAuthData, // Package specific data - SEC_GET_KEY_FN pGetKeyFn, // Pointer to GetKey() func - void * pvGetKeyArgument, // Value to pass to GetKey() - PCredHandle phCredential, // (out) Cred Handle - PTimeStamp ptsExpiry // (out) Lifetime (optional) -); - -typedef DWORD (WINAPI *acquireCredentialsHandle_fn)( - LPSTR pszPrincipal, LPSTR pszPackage, unsigned long fCredentialUse, - void * pvLogonId, void * pAuthData, SEC_GET_KEY_FN pGetKeyFn, void * pvGetKeyArgument, - PCredHandle phCredential, PTimeStamp ptsExpiry - ); - -/** - * Delete Security Context - */ -SECURITY_STATUS SEC_ENTRY _sspi_DeleteSecurityContext( - PCtxtHandle phContext // Context to delete -); - -typedef DWORD (WINAPI *deleteSecurityContext_fn)(PCtxtHandle phContext); - -/** - * Decrypt Message - */ -SECURITY_STATUS SEC_ENTRY _sspi_DecryptMessage( - PCtxtHandle 
phContext, - PSecBufferDesc pMessage, - unsigned long MessageSeqNo, - unsigned long pfQOP -); - -typedef DWORD (WINAPI *decryptMessage_fn)( - PCtxtHandle phContext, PSecBufferDesc pMessage, unsigned long MessageSeqNo, unsigned long pfQOP); - -/** - * Initialize Security Context - */ -SECURITY_STATUS SEC_ENTRY _sspi_initializeSecurityContext( - PCredHandle phCredential, // Cred to base context - PCtxtHandle phContext, // Existing context (OPT) - LPSTR pszTargetName, // Name of target - unsigned long fContextReq, // Context Requirements - unsigned long Reserved1, // Reserved, MBZ - unsigned long TargetDataRep, // Data rep of target - PSecBufferDesc pInput, // Input Buffers - unsigned long Reserved2, // Reserved, MBZ - PCtxtHandle phNewContext, // (out) New Context handle - PSecBufferDesc pOutput, // (inout) Output Buffers - unsigned long * pfContextAttr, // (out) Context attrs - PTimeStamp ptsExpiry // (out) Life span (OPT) -); - -typedef DWORD (WINAPI *initializeSecurityContext_fn)( - PCredHandle phCredential, PCtxtHandle phContext, LPSTR pszTargetName, unsigned long fContextReq, - unsigned long Reserved1, unsigned long TargetDataRep, PSecBufferDesc pInput, unsigned long Reserved2, - PCtxtHandle phNewContext, PSecBufferDesc pOutput, unsigned long * pfContextAttr, PTimeStamp ptsExpiry); - -/** - * Query Context Attributes - */ -SECURITY_STATUS SEC_ENTRY _sspi_QueryContextAttributes( - PCtxtHandle phContext, // Context to query - unsigned long ulAttribute, // Attribute to query - void * pBuffer // Buffer for attributes -); - -typedef DWORD (WINAPI *queryContextAttributes_fn)( - PCtxtHandle phContext, unsigned long ulAttribute, void * pBuffer); - -/** - * InitSecurityInterface - */ -PSecurityFunctionTable _ssip_InitSecurityInterface(); - -typedef DWORD (WINAPI *initSecurityInterface_fn) (); - -/** - * Load security.dll dynamically - */ -int load_library(); - -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.cc deleted file mode 100644 index e7a472f67..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.cc +++ /dev/null @@ -1,7 +0,0 @@ -#include "worker.h" - -Worker::Worker() { -} - -Worker::~Worker() { -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.h deleted file mode 100644 index f73a4a768..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/worker.h +++ /dev/null @@ -1,37 +0,0 @@ -#ifndef WORKER_H_ -#define WORKER_H_ - -#include -#include -#include - -using namespace node; -using namespace v8; - -class Worker { - public: - Worker(); - virtual ~Worker(); - - // libuv's request struct. 
- uv_work_t request; - // Callback - v8::Persistent callback; - // Parameters - void *parameters; - // Results - void *return_value; - // Did we raise an error - bool error; - // The error message - char *error_message; - // Error code if not message - int error_code; - // Any return code - int return_code; - // Method we are going to fire - void (*execute)(Worker *worker); - Handle (*mapper)(Worker *worker); -}; - -#endif // WORKER_H_ diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc deleted file mode 100644 index dd38b5928..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.cc +++ /dev/null @@ -1,110 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "security_buffer.h" - -using namespace node; - -static Handle VException(const char *msg) { - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -}; - -Persistent SecurityBuffer::constructor_template; - -SecurityBuffer::SecurityBuffer(uint32_t security_type, size_t size) : ObjectWrap() { - this->size = size; - this->data = calloc(size, sizeof(char)); - this->security_type = security_type; - // Set up the data in the sec_buffer - this->sec_buffer.BufferType = security_type; - this->sec_buffer.cbBuffer = (unsigned long)size; - this->sec_buffer.pvBuffer = this->data; -} - -SecurityBuffer::SecurityBuffer(uint32_t security_type, size_t size, void *data) : ObjectWrap() { - this->size = size; - this->data = data; - this->security_type = security_type; - // Set up the data in the sec_buffer - this->sec_buffer.BufferType = security_type; - this->sec_buffer.cbBuffer = (unsigned long)size; - this->sec_buffer.pvBuffer = this->data; -} - -SecurityBuffer::~SecurityBuffer() { - free(this->data); -} - -Handle SecurityBuffer::New(const Arguments &args) { - HandleScope scope; - SecurityBuffer *security_obj; - - if(args.Length() != 2) - return VException("Two parameters needed integer buffer type and [32 bit integer/Buffer] required"); - - if(!args[0]->IsInt32()) - return VException("Two parameters needed integer buffer type and [32 bit integer/Buffer] required"); - - if(!args[1]->IsInt32() && !Buffer::HasInstance(args[1])) - return VException("Two parameters needed integer buffer type and [32 bit integer/Buffer] required"); - - // Unpack buffer type - uint32_t buffer_type = args[0]->ToUint32()->Value(); - - // If we have an integer - if(args[1]->IsInt32()) { - security_obj = new SecurityBuffer(buffer_type, args[1]->ToUint32()->Value()); - } else { - // Get the length of the Buffer - size_t length = Buffer::Length(args[1]->ToObject()); - // Allocate space for the internal void data pointer - void *data = calloc(length, sizeof(char)); - // Write the data to out of V8 heap space - memcpy(data, Buffer::Data(args[1]->ToObject()), length); - // Create new SecurityBuffer - security_obj = new SecurityBuffer(buffer_type, length, data); - } - - // Wrap it - security_obj->Wrap(args.This()); - // Return the object - return args.This(); -} - -Handle SecurityBuffer::ToBuffer(const Arguments &args) { - HandleScope scope; - - // Unpack the Security Buffer object - SecurityBuffer *security_obj = ObjectWrap::Unwrap(args.This()); - // Create a Buffer - Buffer *buffer = 
Buffer::New((char *)security_obj->data, (size_t)security_obj->size); - - // Return the buffer - return scope.Close(buffer->handle_); -} - -void SecurityBuffer::Initialize(Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("SecurityBuffer")); - - // Set up method for the Kerberos instance - NODE_SET_PROTOTYPE_METHOD(constructor_template, "toBuffer", ToBuffer); - - // Set up class - target->Set(String::NewSymbol("SecurityBuffer"), constructor_template->GetFunction()); -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.h deleted file mode 100644 index d6a567510..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.h +++ /dev/null @@ -1,46 +0,0 @@ -#ifndef SECURITY_BUFFER_H -#define SECURITY_BUFFER_H - -#include -#include -#include - -#define SECURITY_WIN32 1 - -#include -#include - -using namespace v8; -using namespace node; - -class SecurityBuffer : public ObjectWrap { - public: - SecurityBuffer(uint32_t security_type, size_t size); - SecurityBuffer(uint32_t security_type, size_t size, void *data); - ~SecurityBuffer(); - - // Internal values - void *data; - size_t size; - uint32_t security_type; - SecBuffer sec_buffer; - - // Has instance check - static inline bool HasInstance(Handle val) { - if (!val->IsObject()) return false; - Local obj = val->ToObject(); - return constructor_template->HasInstance(obj); - }; - - // Functions available from V8 - static void Initialize(Handle target); - static Handle ToBuffer(const Arguments &args); - - // Constructor used for creating new Long objects from C++ - static Persistent constructor_template; - - private: - static Handle New(const Arguments &args); -}; - -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.js deleted file mode 100644 index 4996163c9..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer.js +++ /dev/null @@ -1,12 +0,0 @@ -var SecurityBufferNative = require('../../../build/Release/kerberos').SecurityBuffer; - -// Add some attributes -SecurityBufferNative.VERSION = 0; -SecurityBufferNative.EMPTY = 0; -SecurityBufferNative.DATA = 1; -SecurityBufferNative.TOKEN = 2; -SecurityBufferNative.PADDING = 9; -SecurityBufferNative.STREAM = 10; - -// Export the modified class -exports.SecurityBuffer = SecurityBufferNative; \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc deleted file mode 100644 index 560ef50cf..000000000 --- 
a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.cc +++ /dev/null @@ -1,177 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define SECURITY_WIN32 1 - -#include "security_buffer_descriptor.h" -#include "security_buffer.h" - -static Handle VException(const char *msg) { - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -}; - -Persistent SecurityBufferDescriptor::constructor_template; - -SecurityBufferDescriptor::SecurityBufferDescriptor() : ObjectWrap() { -} - -SecurityBufferDescriptor::SecurityBufferDescriptor(Persistent arrayObject) : ObjectWrap() { - SecurityBuffer *security_obj = NULL; - // Safe reference to array - this->arrayObject = arrayObject; - - // Unpack the array and ensure we have a valid descriptor - this->secBufferDesc.cBuffers = arrayObject->Length(); - this->secBufferDesc.ulVersion = SECBUFFER_VERSION; - - if(arrayObject->Length() == 1) { - // Unwrap the buffer - security_obj = ObjectWrap::Unwrap(arrayObject->Get(0)->ToObject()); - // Assign the buffer - this->secBufferDesc.pBuffers = &security_obj->sec_buffer; - } else { - this->secBufferDesc.pBuffers = new SecBuffer[arrayObject->Length()]; - this->secBufferDesc.cBuffers = arrayObject->Length(); - - // Assign the buffers - for(uint32_t i = 0; i < arrayObject->Length(); i++) { - security_obj = ObjectWrap::Unwrap(arrayObject->Get(i)->ToObject()); - this->secBufferDesc.pBuffers[i].BufferType = security_obj->sec_buffer.BufferType; - this->secBufferDesc.pBuffers[i].pvBuffer = security_obj->sec_buffer.pvBuffer; - this->secBufferDesc.pBuffers[i].cbBuffer = security_obj->sec_buffer.cbBuffer; - } - } -} - -SecurityBufferDescriptor::~SecurityBufferDescriptor() { -} - -size_t SecurityBufferDescriptor::bufferSize() { - SecurityBuffer *security_obj = NULL; - - if(this->secBufferDesc.cBuffers == 1) { - security_obj = ObjectWrap::Unwrap(arrayObject->Get(0)->ToObject()); - return security_obj->size; - } else { - int bytesToAllocate = 0; - - for(unsigned int i = 0; i < this->secBufferDesc.cBuffers; i++) { - bytesToAllocate += this->secBufferDesc.pBuffers[i].cbBuffer; - } - - // Return total size - return bytesToAllocate; - } -} - -char *SecurityBufferDescriptor::toBuffer() { - SecurityBuffer *security_obj = NULL; - char *data = NULL; - - if(this->secBufferDesc.cBuffers == 1) { - security_obj = ObjectWrap::Unwrap(arrayObject->Get(0)->ToObject()); - data = (char *)malloc(security_obj->size * sizeof(char)); - memcpy(data, security_obj->data, security_obj->size); - } else { - size_t bytesToAllocate = this->bufferSize(); - char *data = (char *)calloc(bytesToAllocate, sizeof(char)); - int offset = 0; - - for(unsigned int i = 0; i < this->secBufferDesc.cBuffers; i++) { - memcpy((data + offset), this->secBufferDesc.pBuffers[i].pvBuffer, this->secBufferDesc.pBuffers[i].cbBuffer); - offset +=this->secBufferDesc.pBuffers[i].cbBuffer; - } - - // Return the data - return data; - } - - return data; -} - -Handle SecurityBufferDescriptor::New(const Arguments &args) { - HandleScope scope; - SecurityBufferDescriptor *security_obj; - Persistent arrayObject; - - if(args.Length() != 1) - return VException("There must be 1 argument passed in where the first argument is a [int32 or an Array of SecurityBuffers]"); - - if(!args[0]->IsInt32() && !args[0]->IsArray()) - return VException("There must be 1 argument passed in where the first argument is a [int32 or an Array of 
SecurityBuffers]"); - - if(args[0]->IsArray()) { - Handle array = Handle::Cast(args[0]); - // Iterate over all items and ensure we the right type - for(uint32_t i = 0; i < array->Length(); i++) { - if(!SecurityBuffer::HasInstance(array->Get(i))) { - return VException("There must be 1 argument passed in where the first argument is a [int32 or an Array of SecurityBuffers]"); - } - } - } - - // We have a single integer - if(args[0]->IsInt32()) { - // Create new SecurityBuffer instance - Local argv[] = {Int32::New(0x02), args[0]}; - Handle security_buffer = SecurityBuffer::constructor_template->GetFunction()->NewInstance(2, argv); - // Create a new array - Local array = Array::New(1); - // Set the first value - array->Set(0, security_buffer); - // Create persistent handle - arrayObject = Persistent::New(array); - // Create descriptor - security_obj = new SecurityBufferDescriptor(arrayObject); - } else { - arrayObject = Persistent::New(Handle::Cast(args[0])); - security_obj = new SecurityBufferDescriptor(arrayObject); - } - - // Wrap it - security_obj->Wrap(args.This()); - // Return the object - return args.This(); -} - -Handle SecurityBufferDescriptor::ToBuffer(const Arguments &args) { - HandleScope scope; - - // Unpack the Security Buffer object - SecurityBufferDescriptor *security_obj = ObjectWrap::Unwrap(args.This()); - - // Get the buffer - char *buffer_data = security_obj->toBuffer(); - size_t buffer_size = security_obj->bufferSize(); - - // Create a Buffer - Buffer *buffer = Buffer::New(buffer_data, buffer_size); - - // Return the buffer - return scope.Close(buffer->handle_); -} - -void SecurityBufferDescriptor::Initialize(Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("SecurityBufferDescriptor")); - - // Set up method for the Kerberos instance - NODE_SET_PROTOTYPE_METHOD(constructor_template, "toBuffer", ToBuffer); - - target->Set(String::NewSymbol("SecurityBufferDescriptor"), constructor_template->GetFunction()); -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h deleted file mode 100644 index 858863259..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.h +++ /dev/null @@ -1,44 +0,0 @@ -#ifndef SECURITY_BUFFER_DESCRIPTOR_H -#define SECURITY_BUFFER_DESCRIPTOR_H - -#include -#include -#include - -#include -#include - -using namespace v8; -using namespace node; - -class SecurityBufferDescriptor : public ObjectWrap { - public: - Persistent arrayObject; - SecBufferDesc secBufferDesc; - - SecurityBufferDescriptor(); - SecurityBufferDescriptor(Persistent arrayObject); - ~SecurityBufferDescriptor(); - - // Has instance check - static inline bool HasInstance(Handle val) { - if (!val->IsObject()) return false; - Local obj = val->ToObject(); - return constructor_template->HasInstance(obj); - }; - - char *toBuffer(); - size_t bufferSize(); - - // Functions available from V8 - static void Initialize(Handle target); - static Handle ToBuffer(const Arguments &args); - - // Constructor used for creating 
new Long objects from C++ - static Persistent constructor_template; - - private: - static Handle New(const Arguments &args); -}; - -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js deleted file mode 100644 index 9421392ea..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_buffer_descriptor.js +++ /dev/null @@ -1,3 +0,0 @@ -var SecurityBufferDescriptorNative = require('../../../build/Release/kerberos').SecurityBufferDescriptor; -// Export the modified class -exports.SecurityBufferDescriptor = SecurityBufferDescriptorNative; \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.cc deleted file mode 100644 index 8c3691a7e..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.cc +++ /dev/null @@ -1,1211 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "security_context.h" -#include "security_buffer_descriptor.h" - -#ifndef ARRAY_SIZE -# define ARRAY_SIZE(a) (sizeof((a)) / sizeof((a)[0])) -#endif - -static LPSTR DisplaySECError(DWORD ErrCode); - -static Handle VException(const char *msg) { - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -}; - -static Handle VExceptionErrNo(const char *msg, const int errorNumber) { - HandleScope scope; - - Local err = Exception::Error(String::New(msg)); - Local obj = err->ToObject(); - obj->Set(NODE_PSYMBOL("code"), Int32::New(errorNumber)); - return ThrowException(err); -}; - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// UV Lib callbacks -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void Process(uv_work_t* work_req) { - // Grab the worker - Worker *worker = static_cast(work_req->data); - // Execute the worker code - worker->execute(worker); -} - -static void After(uv_work_t* work_req) { - // Grab the scope of the call from Node - v8::HandleScope scope; - - // Get the worker reference - Worker *worker = static_cast(work_req->data); - - // If we have an error - if(worker->error) { - v8::Local err = v8::Exception::Error(v8::String::New(worker->error_message)); - Local obj = err->ToObject(); - obj->Set(NODE_PSYMBOL("code"), Int32::New(worker->error_code)); - v8::Local args[2] = { err, v8::Local::New(v8::Null()) }; - // Execute the error - v8::TryCatch try_catch; - // Call the callback - worker->callback->Call(v8::Context::GetCurrent()->Global(), ARRAY_SIZE(args), args); - // If we have an exception handle it as a fatalexception - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - } else { - // // Map the data - v8::Handle result = worker->mapper(worker); - // Set up the callback with a null first - v8::Handle args[2] = { v8::Local::New(v8::Null()), result}; - // Wrap the callback function call in a TryCatch so that we can call - // node's FatalException afterwards. 
This makes it possible to catch - // the exception from JavaScript land using the - // process.on('uncaughtException') event. - v8::TryCatch try_catch; - // Call the callback - worker->callback->Call(v8::Context::GetCurrent()->Global(), ARRAY_SIZE(args), args); - // If we have an exception handle it as a fatalexception - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - } - - // Clean up the memory - worker->callback.Dispose(); - free(worker->parameters); - delete worker; -} - -Persistent SecurityContext::constructor_template; - -SecurityContext::SecurityContext() : ObjectWrap() { -} - -SecurityContext::~SecurityContext() { - if(this->hasContext) { - _sspi_DeleteSecurityContext(&this->m_Context); - } -} - -Handle SecurityContext::New(const Arguments &args) { - HandleScope scope; - - PSecurityFunctionTable pSecurityInterface = NULL; - DWORD dwNumOfPkgs; - SECURITY_STATUS status; - - // Create code object - SecurityContext *security_obj = new SecurityContext(); - // Get security table interface - pSecurityInterface = _ssip_InitSecurityInterface(); - // Call the security interface - status = (*pSecurityInterface->EnumerateSecurityPackages)( - &dwNumOfPkgs, - &security_obj->m_PkgInfo); - if(status != SEC_E_OK) { - printf(TEXT("Failed in retrieving security packages, Error: %x"), GetLastError()); - return VException("Failed in retrieving security packages"); - } - - // Wrap it - security_obj->Wrap(args.This()); - // Return the object - return args.This(); -} - -Handle SecurityContext::InitializeContextSync(const Arguments &args) { - HandleScope scope; - char *service_principal_name_str = NULL, *input_str = NULL, *decoded_input_str = NULL; - BYTE *out_bound_data_str = NULL; - int decoded_input_str_length = NULL; - // Store reference to security credentials - SecurityCredentials *security_credentials = NULL; - // Status of operation - SECURITY_STATUS status; - - // We need 3 parameters - if(args.Length() != 3) - return VException("Initialize must be called with either [credential:SecurityCredential, servicePrincipalName:string, input:string]"); - - // First parameter must be an instance of SecurityCredentials - if(!SecurityCredentials::HasInstance(args[0])) - return VException("First parameter for Initialize must be an instance of SecurityCredentials"); - - // Second parameter must be a string - if(!args[1]->IsString()) - return VException("Second parameter for Initialize must be a string"); - - // Third parameter must be a base64 encoded string - if(!args[2]->IsString()) - return VException("Second parameter for Initialize must be a string"); - - // Let's unpack the values - Local service_principal_name = args[1]->ToString(); - service_principal_name_str = (char *)calloc(service_principal_name->Utf8Length() + 1, sizeof(char)); - service_principal_name->WriteUtf8(service_principal_name_str); - - // Unpack the user name - Local input = args[2]->ToString(); - - if(input->Utf8Length() > 0) { - input_str = (char *)calloc(input->Utf8Length() + 1, sizeof(char)); - input->WriteUtf8(input_str); - - // Now let's get the base64 decoded string - decoded_input_str = (char *)base64_decode(input_str, &decoded_input_str_length); - } - - // Unpack the Security credentials - security_credentials = ObjectWrap::Unwrap(args[0]->ToObject()); - - // Create Security context instance - Local security_context_value = constructor_template->GetFunction()->NewInstance(); - // Unwrap the security context - SecurityContext *security_context = ObjectWrap::Unwrap(security_context_value); - // Add a reference 
to the security_credentials - security_context->security_credentials = security_credentials; - - // Structures used for c calls - SecBufferDesc ibd, obd; - SecBuffer ib, ob; - - // - // Prepare data structure for returned data from SSPI - ob.BufferType = SECBUFFER_TOKEN; - ob.cbBuffer = security_context->m_PkgInfo->cbMaxToken; - // Allocate space for return data - out_bound_data_str = new BYTE[ob.cbBuffer + sizeof(DWORD)]; - ob.pvBuffer = out_bound_data_str; - // prepare buffer description - obd.cBuffers = 1; - obd.ulVersion = SECBUFFER_VERSION; - obd.pBuffers = &ob; - - // - // Prepare the data we are passing to the SSPI method - if(input->Utf8Length() > 0) { - ib.BufferType = SECBUFFER_TOKEN; - ib.cbBuffer = decoded_input_str_length; - ib.pvBuffer = decoded_input_str; - // prepare buffer description - ibd.cBuffers = 1; - ibd.ulVersion = SECBUFFER_VERSION; - ibd.pBuffers = &ib; - } - - // Perform initialization step - status = _sspi_initializeSecurityContext( - &security_credentials->m_Credentials - , NULL - , const_cast(service_principal_name_str) - , 0x02 // MUTUAL - , 0 - , 0 // Network - , input->Utf8Length() > 0 ? &ibd : NULL - , 0 - , &security_context->m_Context - , &obd - , &security_context->CtxtAttr - , &security_context->Expiration - ); - - // If we have a ok or continue let's prepare the result - if(status == SEC_E_OK - || status == SEC_I_COMPLETE_NEEDED - || status == SEC_I_CONTINUE_NEEDED - || status == SEC_I_COMPLETE_AND_CONTINUE - ) { - security_context->hasContext = true; - security_context->payload = base64_encode((const unsigned char *)ob.pvBuffer, ob.cbBuffer); - } else { - LPSTR err_message = DisplaySECError(status); - - if(err_message != NULL) { - return VExceptionErrNo(err_message, status); - } else { - return VExceptionErrNo("Unknown error", status); - } - } - - // Return security context - return scope.Close(security_context_value); -} - -// -// Async InitializeContext -// -typedef struct SecurityContextStaticInitializeCall { - char *service_principal_name_str; - char *decoded_input_str; - int decoded_input_str_length; - SecurityContext *context; -} SecurityContextStaticInitializeCall; - -static void _initializeContext(Worker *worker) { - // Status of operation - SECURITY_STATUS status; - BYTE *out_bound_data_str = NULL; - SecurityContextStaticInitializeCall *call = (SecurityContextStaticInitializeCall *)worker->parameters; - - // Structures used for c calls - SecBufferDesc ibd, obd; - SecBuffer ib, ob; - - // - // Prepare data structure for returned data from SSPI - ob.BufferType = SECBUFFER_TOKEN; - ob.cbBuffer = call->context->m_PkgInfo->cbMaxToken; - // Allocate space for return data - out_bound_data_str = new BYTE[ob.cbBuffer + sizeof(DWORD)]; - ob.pvBuffer = out_bound_data_str; - // prepare buffer description - obd.cBuffers = 1; - obd.ulVersion = SECBUFFER_VERSION; - obd.pBuffers = &ob; - - // - // Prepare the data we are passing to the SSPI method - if(call->decoded_input_str_length > 0) { - ib.BufferType = SECBUFFER_TOKEN; - ib.cbBuffer = call->decoded_input_str_length; - ib.pvBuffer = call->decoded_input_str; - // prepare buffer description - ibd.cBuffers = 1; - ibd.ulVersion = SECBUFFER_VERSION; - ibd.pBuffers = &ib; - } - - // Perform initialization step - status = _sspi_initializeSecurityContext( - &call->context->security_credentials->m_Credentials - , NULL - , const_cast(call->service_principal_name_str) - , 0x02 // MUTUAL - , 0 - , 0 // Network - , call->decoded_input_str_length > 0 ? 
&ibd : NULL - , 0 - , &call->context->m_Context - , &obd - , &call->context->CtxtAttr - , &call->context->Expiration - ); - - // If we have a ok or continue let's prepare the result - if(status == SEC_E_OK - || status == SEC_I_COMPLETE_NEEDED - || status == SEC_I_CONTINUE_NEEDED - || status == SEC_I_COMPLETE_AND_CONTINUE - ) { - call->context->hasContext = true; - call->context->payload = base64_encode((const unsigned char *)ob.pvBuffer, ob.cbBuffer); - - // Set the context - worker->return_code = status; - worker->return_value = call->context; - } else { - worker->error = TRUE; - worker->error_code = status; - worker->error_message = DisplaySECError(status); - } - - // Clean up data - if(call->decoded_input_str != NULL) free(call->decoded_input_str); - if(call->service_principal_name_str != NULL) free(call->service_principal_name_str); -} - -static Handle _map_initializeContext(Worker *worker) { - HandleScope scope; - - // Unwrap the security context - SecurityContext *context = (SecurityContext *)worker->return_value; - // Return the value - return scope.Close(context->handle_); -} - -Handle SecurityContext::InitializeContext(const Arguments &args) { - HandleScope scope; - char *service_principal_name_str = NULL, *input_str = NULL, *decoded_input_str = NULL; - int decoded_input_str_length = NULL; - // Store reference to security credentials - SecurityCredentials *security_credentials = NULL; - - // We need 3 parameters - if(args.Length() != 4) - return VException("Initialize must be called with [credential:SecurityCredential, servicePrincipalName:string, input:string, callback:function]"); - - // First parameter must be an instance of SecurityCredentials - if(!SecurityCredentials::HasInstance(args[0])) - return VException("First parameter for Initialize must be an instance of SecurityCredentials"); - - // Second parameter must be a string - if(!args[1]->IsString()) - return VException("Second parameter for Initialize must be a string"); - - // Third parameter must be a base64 encoded string - if(!args[2]->IsString()) - return VException("Second parameter for Initialize must be a string"); - - // Third parameter must be a callback - if(!args[3]->IsFunction()) - return VException("Third parameter for Initialize must be a callback function"); - - // Let's unpack the values - Local service_principal_name = args[1]->ToString(); - service_principal_name_str = (char *)calloc(service_principal_name->Utf8Length() + 1, sizeof(char)); - service_principal_name->WriteUtf8(service_principal_name_str); - - // Unpack the user name - Local input = args[2]->ToString(); - - if(input->Utf8Length() > 0) { - input_str = (char *)calloc(input->Utf8Length() + 1, sizeof(char)); - input->WriteUtf8(input_str); - - // Now let's get the base64 decoded string - decoded_input_str = (char *)base64_decode(input_str, &decoded_input_str_length); - // Free original allocation - free(input_str); - } - - // Unpack the Security credentials - security_credentials = ObjectWrap::Unwrap(args[0]->ToObject()); - // Create Security context instance - Local security_context_value = constructor_template->GetFunction()->NewInstance(); - // Unwrap the security context - SecurityContext *security_context = ObjectWrap::Unwrap(security_context_value); - // Add a reference to the security_credentials - security_context->security_credentials = security_credentials; - - // Build the call function - SecurityContextStaticInitializeCall *call = (SecurityContextStaticInitializeCall *)calloc(1, sizeof(SecurityContextStaticInitializeCall)); - 
call->context = security_context; - call->decoded_input_str = decoded_input_str; - call->decoded_input_str_length = decoded_input_str_length; - call->service_principal_name_str = service_principal_name_str; - - // Callback - Local callback = Local::Cast(args[3]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _initializeContext; - worker->mapper = _map_initializeContext; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Process, (uv_after_work_cb)After); - - // Return no value - return scope.Close(Undefined()); -} - -Handle SecurityContext::PayloadGetter(Local property, const AccessorInfo& info) { - HandleScope scope; - // Unpack the context object - SecurityContext *context = ObjectWrap::Unwrap(info.Holder()); - // Return the low bits - return scope.Close(String::New(context->payload)); -} - -Handle SecurityContext::HasContextGetter(Local property, const AccessorInfo& info) { - HandleScope scope; - // Unpack the context object - SecurityContext *context = ObjectWrap::Unwrap(info.Holder()); - // Return the low bits - return scope.Close(Boolean::New(context->hasContext)); -} - -// -// Async InitializeContextStep -// -typedef struct SecurityContextStepStaticInitializeCall { - char *service_principal_name_str; - char *decoded_input_str; - int decoded_input_str_length; - SecurityContext *context; -} SecurityContextStepStaticInitializeCall; - -static void _initializeContextStep(Worker *worker) { - // Outbound data array - BYTE *out_bound_data_str = NULL; - // Status of operation - SECURITY_STATUS status; - // Unpack data - SecurityContextStepStaticInitializeCall *call = (SecurityContextStepStaticInitializeCall *)worker->parameters; - SecurityContext *context = call->context; - // Structures used for c calls - SecBufferDesc ibd, obd; - SecBuffer ib, ob; - - // - // Prepare data structure for returned data from SSPI - ob.BufferType = SECBUFFER_TOKEN; - ob.cbBuffer = context->m_PkgInfo->cbMaxToken; - // Allocate space for return data - out_bound_data_str = new BYTE[ob.cbBuffer + sizeof(DWORD)]; - ob.pvBuffer = out_bound_data_str; - // prepare buffer description - obd.cBuffers = 1; - obd.ulVersion = SECBUFFER_VERSION; - obd.pBuffers = &ob; - - // - // Prepare the data we are passing to the SSPI method - if(call->decoded_input_str_length > 0) { - ib.BufferType = SECBUFFER_TOKEN; - ib.cbBuffer = call->decoded_input_str_length; - ib.pvBuffer = call->decoded_input_str; - // prepare buffer description - ibd.cBuffers = 1; - ibd.ulVersion = SECBUFFER_VERSION; - ibd.pBuffers = &ib; - } - - // Perform initialization step - status = _sspi_initializeSecurityContext( - &context->security_credentials->m_Credentials - , context->hasContext == true ? &context->m_Context : NULL - , const_cast(call->service_principal_name_str) - , 0x02 // MUTUAL - , 0 - , 0 // Network - , call->decoded_input_str_length ? 
&ibd : NULL - , 0 - , &context->m_Context - , &obd - , &context->CtxtAttr - , &context->Expiration - ); - - // If we have a ok or continue let's prepare the result - if(status == SEC_E_OK - || status == SEC_I_COMPLETE_NEEDED - || status == SEC_I_CONTINUE_NEEDED - || status == SEC_I_COMPLETE_AND_CONTINUE - ) { - // Set the new payload - if(context->payload != NULL) free(context->payload); - context->payload = base64_encode((const unsigned char *)ob.pvBuffer, ob.cbBuffer); - worker->return_code = status; - worker->return_value = context; - } else { - worker->error = TRUE; - worker->error_code = status; - worker->error_message = DisplaySECError(status); - } - - // Clean up data - if(call->decoded_input_str != NULL) free(call->decoded_input_str); - if(call->service_principal_name_str != NULL) free(call->service_principal_name_str); -} - -static Handle _map_initializeContextStep(Worker *worker) { - HandleScope scope; - // Unwrap the security context - SecurityContext *context = (SecurityContext *)worker->return_value; - // Return the value - return scope.Close(context->handle_); -} - -Handle SecurityContext::InitalizeStep(const Arguments &args) { - HandleScope scope; - - char *service_principal_name_str = NULL, *input_str = NULL, *decoded_input_str = NULL; - int decoded_input_str_length = NULL; - - // We need 3 parameters - if(args.Length() != 3) - return VException("Initialize must be called with [servicePrincipalName:string, input:string, callback:function]"); - - // Second parameter must be a string - if(!args[0]->IsString()) - return VException("First parameter for Initialize must be a string"); - - // Third parameter must be a base64 encoded string - if(!args[1]->IsString()) - return VException("Second parameter for Initialize must be a string"); - - // Third parameter must be a base64 encoded string - if(!args[2]->IsFunction()) - return VException("Third parameter for Initialize must be a callback function"); - - // Let's unpack the values - Local service_principal_name = args[0]->ToString(); - service_principal_name_str = (char *)calloc(service_principal_name->Utf8Length() + 1, sizeof(char)); - service_principal_name->WriteUtf8(service_principal_name_str); - - // Unpack the user name - Local input = args[1]->ToString(); - - if(input->Utf8Length() > 0) { - input_str = (char *)calloc(input->Utf8Length() + 1, sizeof(char)); - input->WriteUtf8(input_str); - // Now let's get the base64 decoded string - decoded_input_str = (char *)base64_decode(input_str, &decoded_input_str_length); - // Free input string - free(input_str); - } - - // Unwrap the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - - // Create call structure - SecurityContextStepStaticInitializeCall *call = (SecurityContextStepStaticInitializeCall *)calloc(1, sizeof(SecurityContextStepStaticInitializeCall)); - call->context = security_context; - call->decoded_input_str = decoded_input_str; - call->decoded_input_str_length = decoded_input_str_length; - call->service_principal_name_str = service_principal_name_str; - - // Callback - Local callback = Local::Cast(args[2]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _initializeContextStep; - worker->mapper = _map_initializeContextStep; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Process, (uv_after_work_cb)After); - - // 
Return undefined - return scope.Close(Undefined()); -} - -Handle SecurityContext::InitalizeStepSync(const Arguments &args) { - HandleScope scope; - - char *service_principal_name_str = NULL, *input_str = NULL, *decoded_input_str = NULL; - BYTE *out_bound_data_str = NULL; - int decoded_input_str_length = NULL; - // Status of operation - SECURITY_STATUS status; - - // We need 3 parameters - if(args.Length() != 2) - return VException("Initialize must be called with [servicePrincipalName:string, input:string]"); - - // Second parameter must be a string - if(!args[0]->IsString()) - return VException("First parameter for Initialize must be a string"); - - // Third parameter must be a base64 encoded string - if(!args[1]->IsString()) - return VException("Second parameter for Initialize must be a string"); - - // Let's unpack the values - Local service_principal_name = args[0]->ToString(); - service_principal_name_str = (char *)calloc(service_principal_name->Utf8Length() + 1, sizeof(char)); - service_principal_name->WriteUtf8(service_principal_name_str); - - // Unpack the user name - Local input = args[1]->ToString(); - - if(input->Utf8Length() > 0) { - input_str = (char *)calloc(input->Utf8Length() + 1, sizeof(char)); - input->WriteUtf8(input_str); - // Now let's get the base64 decoded string - decoded_input_str = (char *)base64_decode(input_str, &decoded_input_str_length); - } - - // Unpack the long object - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - SecurityCredentials *security_credentials = security_context->security_credentials; - - // Structures used for c calls - SecBufferDesc ibd, obd; - SecBuffer ib, ob; - - // - // Prepare data structure for returned data from SSPI - ob.BufferType = SECBUFFER_TOKEN; - ob.cbBuffer = security_context->m_PkgInfo->cbMaxToken; - // Allocate space for return data - out_bound_data_str = new BYTE[ob.cbBuffer + sizeof(DWORD)]; - ob.pvBuffer = out_bound_data_str; - // prepare buffer description - obd.cBuffers = 1; - obd.ulVersion = SECBUFFER_VERSION; - obd.pBuffers = &ob; - - // - // Prepare the data we are passing to the SSPI method - if(input->Utf8Length() > 0) { - ib.BufferType = SECBUFFER_TOKEN; - ib.cbBuffer = decoded_input_str_length; - ib.pvBuffer = decoded_input_str; - // prepare buffer description - ibd.cBuffers = 1; - ibd.ulVersion = SECBUFFER_VERSION; - ibd.pBuffers = &ib; - } - - // Perform initialization step - status = _sspi_initializeSecurityContext( - &security_credentials->m_Credentials - , security_context->hasContext == true ? &security_context->m_Context : NULL - , const_cast(service_principal_name_str) - , 0x02 // MUTUAL - , 0 - , 0 // Network - , input->Utf8Length() > 0 ? 
&ibd : NULL - , 0 - , &security_context->m_Context - , &obd - , &security_context->CtxtAttr - , &security_context->Expiration - ); - - // If we have a ok or continue let's prepare the result - if(status == SEC_E_OK - || status == SEC_I_COMPLETE_NEEDED - || status == SEC_I_CONTINUE_NEEDED - || status == SEC_I_COMPLETE_AND_CONTINUE - ) { - // Set the new payload - if(security_context->payload != NULL) free(security_context->payload); - security_context->payload = base64_encode((const unsigned char *)ob.pvBuffer, ob.cbBuffer); - } else { - LPSTR err_message = DisplaySECError(status); - - if(err_message != NULL) { - return VExceptionErrNo(err_message, status); - } else { - return VExceptionErrNo("Unknown error", status); - } - } - - return scope.Close(Null()); -} - -// -// Async EncryptMessage -// -typedef struct SecurityContextEncryptMessageCall { - SecurityContext *context; - SecurityBufferDescriptor *descriptor; - unsigned long flags; -} SecurityContextEncryptMessageCall; - -static void _encryptMessage(Worker *worker) { - SECURITY_STATUS status; - // Unpack call - SecurityContextEncryptMessageCall *call = (SecurityContextEncryptMessageCall *)worker->parameters; - // Unpack the security context - SecurityContext *context = call->context; - SecurityBufferDescriptor *descriptor = call->descriptor; - - // Let's execute encryption - status = _sspi_EncryptMessage( - &context->m_Context - , call->flags - , &descriptor->secBufferDesc - , 0 - ); - - // We've got ok - if(status == SEC_E_OK) { - int bytesToAllocate = (int)descriptor->bufferSize(); - // Free up existing payload - if(context->payload != NULL) free(context->payload); - // Save the payload - context->payload = base64_encode((unsigned char *)descriptor->toBuffer(), bytesToAllocate); - // Set result - worker->return_code = status; - worker->return_value = context; - } else { - worker->error = TRUE; - worker->error_code = status; - worker->error_message = DisplaySECError(status); - } -} - -static Handle _map_encryptMessage(Worker *worker) { - HandleScope scope; - // Unwrap the security context - SecurityContext *context = (SecurityContext *)worker->return_value; - // Return the value - return scope.Close(context->handle_); -} - -Handle SecurityContext::EncryptMessage(const Arguments &args) { - HandleScope scope; - - if(args.Length() != 3) - return VException("EncryptMessage takes an instance of SecurityBufferDescriptor, an integer flag and a callback function"); - if(!SecurityBufferDescriptor::HasInstance(args[0])) - return VException("EncryptMessage takes an instance of SecurityBufferDescriptor, an integer flag and a callback function"); - if(!args[1]->IsUint32()) - return VException("EncryptMessage takes an instance of SecurityBufferDescriptor, an integer flag and a callback function"); - if(!args[2]->IsFunction()) - return VException("EncryptMessage takes an instance of SecurityBufferDescriptor, an integer flag and a callback function"); - - // Unpack the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - - // Unpack the descriptor - SecurityBufferDescriptor *descriptor = ObjectWrap::Unwrap(args[0]->ToObject()); - - // Create call structure - SecurityContextEncryptMessageCall *call = (SecurityContextEncryptMessageCall *)calloc(1, sizeof(SecurityContextEncryptMessageCall)); - call->context = security_context; - call->descriptor = descriptor; - call->flags = (unsigned long)args[1]->ToInteger()->Value(); - - // Callback - Local callback = Local::Cast(args[2]); - - // Let's allocate some space - Worker 
*worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _encryptMessage; - worker->mapper = _map_encryptMessage; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Process, (uv_after_work_cb)After); - - // Return undefined - return scope.Close(Undefined()); -} - -Handle SecurityContext::EncryptMessageSync(const Arguments &args) { - HandleScope scope; - SECURITY_STATUS status; - - if(args.Length() != 2) - return VException("EncryptMessageSync takes an instance of SecurityBufferDescriptor and an integer flag"); - if(!SecurityBufferDescriptor::HasInstance(args[0])) - return VException("EncryptMessageSync takes an instance of SecurityBufferDescriptor and an integer flag"); - if(!args[1]->IsUint32()) - return VException("EncryptMessageSync takes an instance of SecurityBufferDescriptor and an integer flag"); - - // Unpack the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - - // Unpack the descriptor - SecurityBufferDescriptor *descriptor = ObjectWrap::Unwrap(args[0]->ToObject()); - - // Let's execute encryption - status = _sspi_EncryptMessage( - &security_context->m_Context - , (unsigned long)args[1]->ToInteger()->Value() - , &descriptor->secBufferDesc - , 0 - ); - - // We've got ok - if(status == SEC_E_OK) { - int bytesToAllocate = (int)descriptor->bufferSize(); - // Free up existing payload - if(security_context->payload != NULL) free(security_context->payload); - // Save the payload - security_context->payload = base64_encode((unsigned char *)descriptor->toBuffer(), bytesToAllocate); - } else { - LPSTR err_message = DisplaySECError(status); - - if(err_message != NULL) { - return VExceptionErrNo(err_message, status); - } else { - return VExceptionErrNo("Unknown error", status); - } - } - - return scope.Close(Null()); -} - -// -// Async DecryptMessage -// -typedef struct SecurityContextDecryptMessageCall { - SecurityContext *context; - SecurityBufferDescriptor *descriptor; -} SecurityContextDecryptMessageCall; - -static void _decryptMessage(Worker *worker) { - unsigned long quality = 0; - SECURITY_STATUS status; - - // Unpack parameters - SecurityContextDecryptMessageCall *call = (SecurityContextDecryptMessageCall *)worker->parameters; - SecurityContext *context = call->context; - SecurityBufferDescriptor *descriptor = call->descriptor; - - // Let's execute encryption - status = _sspi_DecryptMessage( - &context->m_Context - , &descriptor->secBufferDesc - , 0 - , (unsigned long)&quality - ); - - // We've got ok - if(status == SEC_E_OK) { - int bytesToAllocate = (int)descriptor->bufferSize(); - // Free up existing payload - if(context->payload != NULL) free(context->payload); - // Save the payload - context->payload = base64_encode((unsigned char *)descriptor->toBuffer(), bytesToAllocate); - // Set return values - worker->return_code = status; - worker->return_value = context; - } else { - worker->error = TRUE; - worker->error_code = status; - worker->error_message = DisplaySECError(status); - } -} - -static Handle _map_decryptMessage(Worker *worker) { - HandleScope scope; - // Unwrap the security context - SecurityContext *context = (SecurityContext *)worker->return_value; - // Return the value - return scope.Close(context->handle_); -} - -Handle SecurityContext::DecryptMessage(const Arguments &args) { - HandleScope scope; - - if(args.Length() != 2) - return VException("DecryptMessage takes an 
instance of SecurityBufferDescriptor and a callback function"); - if(!SecurityBufferDescriptor::HasInstance(args[0])) - return VException("DecryptMessage takes an instance of SecurityBufferDescriptor and a callback function"); - if(!args[1]->IsFunction()) - return VException("DecryptMessage takes an instance of SecurityBufferDescriptor and a callback function"); - - // Unpack the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - // Unpack the descriptor - SecurityBufferDescriptor *descriptor = ObjectWrap::Unwrap(args[0]->ToObject()); - // Create call structure - SecurityContextDecryptMessageCall *call = (SecurityContextDecryptMessageCall *)calloc(1, sizeof(SecurityContextDecryptMessageCall)); - call->context = security_context; - call->descriptor = descriptor; - - // Callback - Local callback = Local::Cast(args[1]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _decryptMessage; - worker->mapper = _map_decryptMessage; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Process, (uv_after_work_cb)After); - - // Return undefined - return scope.Close(Undefined()); -} - -Handle SecurityContext::DecryptMessageSync(const Arguments &args) { - HandleScope scope; - unsigned long quality = 0; - SECURITY_STATUS status; - - if(args.Length() != 1) - return VException("DecryptMessageSync takes an instance of SecurityBufferDescriptor"); - if(!SecurityBufferDescriptor::HasInstance(args[0])) - return VException("DecryptMessageSync takes an instance of SecurityBufferDescriptor"); - - // Unpack the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - - // Unpack the descriptor - SecurityBufferDescriptor *descriptor = ObjectWrap::Unwrap(args[0]->ToObject()); - - // Let's execute encryption - status = _sspi_DecryptMessage( - &security_context->m_Context - , &descriptor->secBufferDesc - , 0 - , (unsigned long)&quality - ); - - // We've got ok - if(status == SEC_E_OK) { - int bytesToAllocate = (int)descriptor->bufferSize(); - // Free up existing payload - if(security_context->payload != NULL) free(security_context->payload); - // Save the payload - security_context->payload = base64_encode((unsigned char *)descriptor->toBuffer(), bytesToAllocate); - } else { - LPSTR err_message = DisplaySECError(status); - - if(err_message != NULL) { - return VExceptionErrNo(err_message, status); - } else { - return VExceptionErrNo("Unknown error", status); - } - } - - return scope.Close(Null()); -} - -// -// Async QueryContextAttributes -// -typedef struct SecurityContextQueryContextAttributesCall { - SecurityContext *context; - uint32_t attribute; -} SecurityContextQueryContextAttributesCall; - -static void _queryContextAttributes(Worker *worker) { - SECURITY_STATUS status; - - // Cast to data structure - SecurityContextQueryContextAttributesCall *call = (SecurityContextQueryContextAttributesCall *)worker->parameters; - - // Allocate some space - SecPkgContext_Sizes *sizes = (SecPkgContext_Sizes *)calloc(1, sizeof(SecPkgContext_Sizes)); - // Let's grab the query context attribute - status = _sspi_QueryContextAttributes( - &call->context->m_Context, - call->attribute, - sizes - ); - - if(status == SEC_E_OK) { - worker->return_code = status; - worker->return_value = sizes; - } else { - worker->error = TRUE; - worker->error_code = status; - 
worker->error_message = DisplaySECError(status); - } -} - -static Handle _map_queryContextAttributes(Worker *worker) { - HandleScope scope; - - // Cast to data structure - SecurityContextQueryContextAttributesCall *call = (SecurityContextQueryContextAttributesCall *)worker->parameters; - // Unpack the attribute - uint32_t attribute = call->attribute; - - // Convert data - if(attribute == SECPKG_ATTR_SIZES) { - SecPkgContext_Sizes *sizes = (SecPkgContext_Sizes *)worker->return_value; - // Create object - Local value = Object::New(); - value->Set(String::New("maxToken"), Integer::New(sizes->cbMaxToken)); - value->Set(String::New("maxSignature"), Integer::New(sizes->cbMaxSignature)); - value->Set(String::New("blockSize"), Integer::New(sizes->cbBlockSize)); - value->Set(String::New("securityTrailer"), Integer::New(sizes->cbSecurityTrailer)); - return scope.Close(value); - } - - // Return the value - return scope.Close(Null()); -} - -Handle SecurityContext::QueryContextAttributes(const Arguments &args) { - HandleScope scope; - - if(args.Length() != 2) - return VException("QueryContextAttributesSync method takes a an integer Attribute specifier and a callback function"); - if(!args[0]->IsInt32()) - return VException("QueryContextAttributes method takes a an integer Attribute specifier and a callback function"); - if(!args[1]->IsFunction()) - return VException("QueryContextAttributes method takes a an integer Attribute specifier and a callback function"); - - // Unpack the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - - // Unpack the int value - uint32_t attribute = args[0]->ToInt32()->Value(); - - // Check that we have a supported attribute - if(attribute != SECPKG_ATTR_SIZES) - return VException("QueryContextAttributes only supports the SECPKG_ATTR_SIZES attribute"); - - // Create call structure - SecurityContextQueryContextAttributesCall *call = (SecurityContextQueryContextAttributesCall *)calloc(1, sizeof(SecurityContextQueryContextAttributesCall)); - call->attribute = attribute; - call->context = security_context; - - // Callback - Local callback = Local::Cast(args[1]); - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _queryContextAttributes; - worker->mapper = _map_queryContextAttributes; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, Process, (uv_after_work_cb)After); - - // Return undefined - return scope.Close(Undefined()); -} - -Handle SecurityContext::QueryContextAttributesSync(const Arguments &args) { - HandleScope scope; - SECURITY_STATUS status; - - if(args.Length() != 1) - return VException("QueryContextAttributesSync method takes a an integer Attribute specifier"); - if(!args[0]->IsInt32()) - return VException("QueryContextAttributesSync method takes a an integer Attribute specifier"); - - // Unpack the security context - SecurityContext *security_context = ObjectWrap::Unwrap(args.This()); - uint32_t attribute = args[0]->ToInt32()->Value(); - - if(attribute != SECPKG_ATTR_SIZES) - return VException("QueryContextAttributes only supports the SECPKG_ATTR_SIZES attribute"); - - // Check what attribute we are asking for - if(attribute == SECPKG_ATTR_SIZES) { - SecPkgContext_Sizes sizes; - - // Let's grab the query context attribute - status = _sspi_QueryContextAttributes( - &security_context->m_Context, - attribute, - &sizes - ); 
- - if(status == SEC_E_OK) { - Local value = Object::New(); - value->Set(String::New("maxToken"), Integer::New(sizes.cbMaxToken)); - value->Set(String::New("maxSignature"), Integer::New(sizes.cbMaxSignature)); - value->Set(String::New("blockSize"), Integer::New(sizes.cbBlockSize)); - value->Set(String::New("securityTrailer"), Integer::New(sizes.cbSecurityTrailer)); - return scope.Close(value); - } else { - LPSTR err_message = DisplaySECError(status); - - if(err_message != NULL) { - return VExceptionErrNo(err_message, status); - } else { - return VExceptionErrNo("Unknown error", status); - } - } - } - - return scope.Close(Null()); -} - -void SecurityContext::Initialize(Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("SecurityContext")); - - // Class methods - NODE_SET_METHOD(constructor_template, "initializeSync", InitializeContextSync); - NODE_SET_METHOD(constructor_template, "initialize", InitializeContext); - - // Set up method for the instance - NODE_SET_PROTOTYPE_METHOD(constructor_template, "initializeSync", InitalizeStepSync); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "initialize", InitalizeStep); - - NODE_SET_PROTOTYPE_METHOD(constructor_template, "decryptMessageSync", DecryptMessageSync); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "decryptMessage", DecryptMessage); - - NODE_SET_PROTOTYPE_METHOD(constructor_template, "queryContextAttributesSync", QueryContextAttributesSync); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "queryContextAttributes", QueryContextAttributes); - - NODE_SET_PROTOTYPE_METHOD(constructor_template, "encryptMessageSync", EncryptMessageSync); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "encryptMessage", EncryptMessage); - - // Getters for correct serialization of the object - constructor_template->InstanceTemplate()->SetAccessor(String::NewSymbol("payload"), PayloadGetter); - // Getters for correct serialization of the object - constructor_template->InstanceTemplate()->SetAccessor(String::NewSymbol("hasContext"), HasContextGetter); - - // Set template class name - target->Set(String::NewSymbol("SecurityContext"), constructor_template->GetFunction()); -} - -static LPSTR DisplaySECError(DWORD ErrCode) { - LPSTR pszName = NULL; // WinError.h - - switch(ErrCode) { - case SEC_E_BUFFER_TOO_SMALL: - pszName = "SEC_E_BUFFER_TOO_SMALL - The message buffer is too small. Used with the Digest SSP."; - break; - - case SEC_E_CRYPTO_SYSTEM_INVALID: - pszName = "SEC_E_CRYPTO_SYSTEM_INVALID - The cipher chosen for the security context is not supported. Used with the Digest SSP."; - break; - case SEC_E_INCOMPLETE_MESSAGE: - pszName = "SEC_E_INCOMPLETE_MESSAGE - The data in the input buffer is incomplete. The application needs to read more data from the server and call DecryptMessageSync (General) again."; - break; - - case SEC_E_INVALID_HANDLE: - pszName = "SEC_E_INVALID_HANDLE - A context handle that is not valid was specified in the phContext parameter. Used with the Digest and Schannel SSPs."; - break; - - case SEC_E_INVALID_TOKEN: - pszName = "SEC_E_INVALID_TOKEN - The buffers are of the wrong type or no buffer of type SECBUFFER_DATA was found. 
Used with the Schannel SSP."; - break; - - case SEC_E_MESSAGE_ALTERED: - pszName = "SEC_E_MESSAGE_ALTERED - The message has been altered. Used with the Digest and Schannel SSPs."; - break; - - case SEC_E_OUT_OF_SEQUENCE: - pszName = "SEC_E_OUT_OF_SEQUENCE - The message was not received in the correct sequence."; - break; - - case SEC_E_QOP_NOT_SUPPORTED: - pszName = "SEC_E_QOP_NOT_SUPPORTED - Neither confidentiality nor integrity are supported by the security context. Used with the Digest SSP."; - break; - - case SEC_I_CONTEXT_EXPIRED: - pszName = "SEC_I_CONTEXT_EXPIRED - The message sender has finished using the connection and has initiated a shutdown."; - break; - - case SEC_I_RENEGOTIATE: - pszName = "SEC_I_RENEGOTIATE - The remote party requires a new handshake sequence or the application has just initiated a shutdown."; - break; - - case SEC_E_ENCRYPT_FAILURE: - pszName = "SEC_E_ENCRYPT_FAILURE - The specified data could not be encrypted."; - break; - - case SEC_E_DECRYPT_FAILURE: - pszName = "SEC_E_DECRYPT_FAILURE - The specified data could not be decrypted."; - break; - case -1: - pszName = "Failed to load security.dll library"; - break; - } - - return pszName; -} - diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.h deleted file mode 100644 index b0059e39e..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.h +++ /dev/null @@ -1,85 +0,0 @@ -#ifndef SECURITY_CONTEXT_H -#define SECURITY_CONTEXT_H - -#include -#include -#include - -#define SECURITY_WIN32 1 - -#include -#include -#include "security_credentials.h" -#include "../worker.h" - -extern "C" { - #include "../kerberos_sspi.h" - #include "../base64.h" -} - -using namespace v8; -using namespace node; - -class SecurityContext : public ObjectWrap { - public: - SecurityContext(); - ~SecurityContext(); - - // Security info package - PSecPkgInfo m_PkgInfo; - // Do we have a context - bool hasContext; - // Reference to security credentials - SecurityCredentials *security_credentials; - // Security context - CtxtHandle m_Context; - // Attributes - DWORD CtxtAttr; - // Expiry time for ticket - TimeStamp Expiration; - // Payload - char *payload; - - // Has instance check - static inline bool HasInstance(Handle val) { - if (!val->IsObject()) return false; - Local obj = val->ToObject(); - return constructor_template->HasInstance(obj); - }; - - // Functions available from V8 - static void Initialize(Handle target); - - static Handle InitializeContext(const Arguments &args); - static Handle InitializeContextSync(const Arguments &args); - - static Handle InitalizeStep(const Arguments &args); - static Handle InitalizeStepSync(const Arguments &args); - - static Handle DecryptMessage(const Arguments &args); - static Handle DecryptMessageSync(const Arguments &args); - - static Handle QueryContextAttributesSync(const Arguments &args); - static Handle QueryContextAttributes(const Arguments &args); - - static Handle EncryptMessageSync(const Arguments &args); - static Handle EncryptMessage(const Arguments &args); - - // Payload getter - static Handle PayloadGetter(Local property, const AccessorInfo& info); - // hasContext getter - static Handle HasContextGetter(Local property, const AccessorInfo& info); - - // Constructor used for creating new Long objects from 
C++ - static Persistent constructor_template; - - private: - // Create a new instance - static Handle New(const Arguments &args); - // // Handles the uv calls - // static void Process(uv_work_t* work_req); - // // Called after work is done - // static void After(uv_work_t* work_req); -}; - -#endif diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.js deleted file mode 100644 index ef04e9274..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_context.js +++ /dev/null @@ -1,3 +0,0 @@ -var SecurityContextNative = require('../../../build/Release/kerberos').SecurityContext; -// Export the modified class -exports.SecurityContext = SecurityContextNative; \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc deleted file mode 100644 index 025238b7c..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.cc +++ /dev/null @@ -1,468 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "security_credentials.h" - -#ifndef ARRAY_SIZE -# define ARRAY_SIZE(a) (sizeof((a)) / sizeof((a)[0])) -#endif - -static LPSTR DisplaySECError(DWORD ErrCode); - -static Handle VException(const char *msg) { - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -}; - -static Handle VExceptionErrNo(const char *msg, const int errorNumber) { - HandleScope scope; - - Local err = Exception::Error(String::New(msg)); - Local obj = err->ToObject(); - obj->Set(NODE_PSYMBOL("code"), Int32::New(errorNumber)); - return ThrowException(err); -}; - -Persistent SecurityCredentials::constructor_template; - -SecurityCredentials::SecurityCredentials() : ObjectWrap() { -} - -SecurityCredentials::~SecurityCredentials() { -} - -Handle SecurityCredentials::New(const Arguments &args) { - HandleScope scope; - - // Create security credentials instance - SecurityCredentials *security_credentials = new SecurityCredentials(); - // Wrap it - security_credentials->Wrap(args.This()); - // Return the object - return args.This(); -} - -Handle SecurityCredentials::AquireSync(const Arguments &args) { - HandleScope scope; - char *package_str = NULL, *username_str = NULL, *password_str = NULL, *domain_str = NULL; - // Status of operation - SECURITY_STATUS status; - - // Unpack the variables - if(args.Length() != 2 && args.Length() != 3 && args.Length() != 4) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string]]"); - - if(!args[0]->IsString()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string]]"); - - if(!args[1]->IsString()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string]]"); - - if(args.Length() == 3 && !args[2]->IsString()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, 
domain:string]]"); - - if(args.Length() == 4 && (!args[3]->IsString() && !args[3]->IsUndefined() && !args[3]->IsNull())) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string]]"); - - // Unpack the package - Local package = args[0]->ToString(); - package_str = (char *)calloc(package->Utf8Length() + 1, sizeof(char)); - package->WriteUtf8(package_str); - - // Unpack the user name - Local username = args[1]->ToString(); - username_str = (char *)calloc(username->Utf8Length() + 1, sizeof(char)); - username->WriteUtf8(username_str); - - // If we have a password - if(args.Length() == 3 || args.Length() == 4) { - Local password = args[2]->ToString(); - password_str = (char *)calloc(password->Utf8Length() + 1, sizeof(char)); - password->WriteUtf8(password_str); - } - - // If we have a domain - if(args.Length() == 4 && args[3]->IsString()) { - Local domain = args[3]->ToString(); - domain_str = (char *)calloc(domain->Utf8Length() + 1, sizeof(char)); - domain->WriteUtf8(domain_str); - } - - // Create Security instance - Local security_credentials_value = constructor_template->GetFunction()->NewInstance(); - - // Unwrap the credentials - SecurityCredentials *security_credentials = ObjectWrap::Unwrap(security_credentials_value); - - // If we have domain string - if(domain_str != NULL) { - security_credentials->m_Identity.Domain = USTR(_tcsdup(domain_str)); - security_credentials->m_Identity.DomainLength = (unsigned long)_tcslen(domain_str); - } else { - security_credentials->m_Identity.Domain = NULL; - security_credentials->m_Identity.DomainLength = 0; - } - - // Set up the user - security_credentials->m_Identity.User = USTR(_tcsdup(username_str)); - security_credentials->m_Identity.UserLength = (unsigned long)_tcslen(username_str); - - // If we have a password string - if(password_str != NULL) { - // Set up the password - security_credentials->m_Identity.Password = USTR(_tcsdup(password_str)); - security_credentials->m_Identity.PasswordLength = (unsigned long)_tcslen(password_str); - } - - #ifdef _UNICODE - security_credentials->m_Identity.Flags = SEC_WINNT_AUTH_IDENTITY_UNICODE; - #else - security_credentials->m_Identity.Flags = SEC_WINNT_AUTH_IDENTITY_ANSI; - #endif - - // Attempt to acquire credentials - status = _sspi_AcquireCredentialsHandle( - NULL, - package_str, - SECPKG_CRED_OUTBOUND, - NULL, - password_str != NULL ? 
&security_credentials->m_Identity : NULL, - NULL, NULL, - &security_credentials->m_Credentials, - &security_credentials->Expiration - ); - - // We have an error - if(status != SEC_E_OK) { - LPSTR err_message = DisplaySECError(status); - - if(err_message != NULL) { - return VExceptionErrNo(err_message, status); - } else { - return VExceptionErrNo("Unknown error", status); - } - } - - // Make object persistent - Persistent persistent = Persistent::New(security_credentials_value); - // Return the object - return scope.Close(persistent); -} - -// Call structs -typedef struct SecurityCredentialCall { - char *package_str; - char *username_str; - char *password_str; - char *domain_str; - SecurityCredentials *credentials; -} SecurityCredentialCall; - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// authGSSClientInit -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -static void _authSSPIAquire(Worker *worker) { - // Status of operation - SECURITY_STATUS status; - - // Unpack data - SecurityCredentialCall *call = (SecurityCredentialCall *)worker->parameters; - - // Unwrap the credentials - SecurityCredentials *security_credentials = (SecurityCredentials *)call->credentials; - - // If we have domain string - if(call->domain_str != NULL) { - security_credentials->m_Identity.Domain = USTR(_tcsdup(call->domain_str)); - security_credentials->m_Identity.DomainLength = (unsigned long)_tcslen(call->domain_str); - } else { - security_credentials->m_Identity.Domain = NULL; - security_credentials->m_Identity.DomainLength = 0; - } - - // Set up the user - security_credentials->m_Identity.User = USTR(_tcsdup(call->username_str)); - security_credentials->m_Identity.UserLength = (unsigned long)_tcslen(call->username_str); - - // If we have a password string - if(call->password_str != NULL) { - // Set up the password - security_credentials->m_Identity.Password = USTR(_tcsdup(call->password_str)); - security_credentials->m_Identity.PasswordLength = (unsigned long)_tcslen(call->password_str); - } - - #ifdef _UNICODE - security_credentials->m_Identity.Flags = SEC_WINNT_AUTH_IDENTITY_UNICODE; - #else - security_credentials->m_Identity.Flags = SEC_WINNT_AUTH_IDENTITY_ANSI; - #endif - - // Attempt to acquire credentials - status = _sspi_AcquireCredentialsHandle( - NULL, - call->package_str, - SECPKG_CRED_OUTBOUND, - NULL, - call->password_str != NULL ? 
&security_credentials->m_Identity : NULL, - NULL, NULL, - &security_credentials->m_Credentials, - &security_credentials->Expiration - ); - - // We have an error - if(status != SEC_E_OK) { - worker->error = TRUE; - worker->error_code = status; - worker->error_message = DisplaySECError(status); - } else { - worker->return_code = status; - worker->return_value = security_credentials; - } - - // Free up parameter structure - if(call->package_str != NULL) free(call->package_str); - if(call->domain_str != NULL) free(call->domain_str); - if(call->password_str != NULL) free(call->password_str); - if(call->username_str != NULL) free(call->username_str); - free(call); -} - -static Handle _map_authSSPIAquire(Worker *worker) { - HandleScope scope; - - // Unpack the credentials - SecurityCredentials *security_credentials = (SecurityCredentials *)worker->return_value; - // Make object persistent - Persistent persistent = Persistent::New(security_credentials->handle_); - // Return the object - return scope.Close(persistent); -} - -Handle SecurityCredentials::Aquire(const Arguments &args) { - HandleScope scope; - char *package_str = NULL, *username_str = NULL, *password_str = NULL, *domain_str = NULL; - // Unpack the variables - if(args.Length() != 2 && args.Length() != 3 && args.Length() != 4 && args.Length() != 5) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string], callback:function]"); - - if(!args[0]->IsString()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string], callback:function]"); - - if(!args[1]->IsString()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string], callback:function]"); - - if(args.Length() == 3 && (!args[2]->IsString() && !args[2]->IsFunction())) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string], callback:function]"); - - if(args.Length() == 4 && (!args[3]->IsString() && !args[3]->IsUndefined() && !args[3]->IsNull()) && !args[3]->IsFunction()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string], callback:function]"); - - if(args.Length() == 5 && !args[4]->IsFunction()) - return VException("Aquire must be called with either [package:string, username:string, [password:string, domain:string], callback:function]"); - - Local callback; - - // Figure out which parameter is the callback - if(args.Length() == 5) { - callback = Local::Cast(args[4]); - } else if(args.Length() == 4) { - callback = Local::Cast(args[3]); - } else if(args.Length() == 3) { - callback = Local::Cast(args[2]); - } - - // Unpack the package - Local package = args[0]->ToString(); - package_str = (char *)calloc(package->Utf8Length() + 1, sizeof(char)); - package->WriteUtf8(package_str); - - // Unpack the user name - Local username = args[1]->ToString(); - username_str = (char *)calloc(username->Utf8Length() + 1, sizeof(char)); - username->WriteUtf8(username_str); - - // If we have a password - if(args.Length() == 3 || args.Length() == 4 || args.Length() == 5) { - Local password = args[2]->ToString(); - password_str = (char *)calloc(password->Utf8Length() + 1, sizeof(char)); - password->WriteUtf8(password_str); - } - - // If we have a domain - if((args.Length() == 4 || args.Length() == 5) && args[3]->IsString()) { - Local domain = args[3]->ToString(); - domain_str = (char 
*)calloc(domain->Utf8Length() + 1, sizeof(char)); - domain->WriteUtf8(domain_str); - } - - // Create reference object - Local security_credentials_value = constructor_template->GetFunction()->NewInstance(); - // Unwrap object - SecurityCredentials *security_credentials = ObjectWrap::Unwrap(security_credentials_value); - - // Allocate call structure - SecurityCredentialCall *call = (SecurityCredentialCall *)calloc(1, sizeof(SecurityCredentialCall)); - call->domain_str = domain_str; - call->package_str = package_str; - call->password_str = password_str; - call->username_str = username_str; - call->credentials = security_credentials; - - // Let's allocate some space - Worker *worker = new Worker(); - worker->error = false; - worker->request.data = worker; - worker->callback = Persistent::New(callback); - worker->parameters = call; - worker->execute = _authSSPIAquire; - worker->mapper = _map_authSSPIAquire; - - // Schedule the worker with lib_uv - uv_queue_work(uv_default_loop(), &worker->request, SecurityCredentials::Process, (uv_after_work_cb)SecurityCredentials::After); - - // Return the undefined value - return scope.Close(Undefined()); -} - -void SecurityCredentials::Initialize(Handle target) { - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("SecurityCredentials")); - - // Class methods - NODE_SET_METHOD(constructor_template, "aquireSync", AquireSync); - NODE_SET_METHOD(constructor_template, "aquire", Aquire); - - // Set the class on the target module - target->Set(String::NewSymbol("SecurityCredentials"), constructor_template->GetFunction()); - - // Attempt to load the security.dll library - load_library(); -} - -static LPSTR DisplaySECError(DWORD ErrCode) { - LPSTR pszName = NULL; // WinError.h - - switch(ErrCode) { - case SEC_E_BUFFER_TOO_SMALL: - pszName = "SEC_E_BUFFER_TOO_SMALL - The message buffer is too small. Used with the Digest SSP."; - break; - - case SEC_E_CRYPTO_SYSTEM_INVALID: - pszName = "SEC_E_CRYPTO_SYSTEM_INVALID - The cipher chosen for the security context is not supported. Used with the Digest SSP."; - break; - case SEC_E_INCOMPLETE_MESSAGE: - pszName = "SEC_E_INCOMPLETE_MESSAGE - The data in the input buffer is incomplete. The application needs to read more data from the server and call DecryptMessage (General) again."; - break; - - case SEC_E_INVALID_HANDLE: - pszName = "SEC_E_INVALID_HANDLE - A context handle that is not valid was specified in the phContext parameter. Used with the Digest and Schannel SSPs."; - break; - - case SEC_E_INVALID_TOKEN: - pszName = "SEC_E_INVALID_TOKEN - The buffers are of the wrong type or no buffer of type SECBUFFER_DATA was found. Used with the Schannel SSP."; - break; - - case SEC_E_MESSAGE_ALTERED: - pszName = "SEC_E_MESSAGE_ALTERED - The message has been altered. Used with the Digest and Schannel SSPs."; - break; - - case SEC_E_OUT_OF_SEQUENCE: - pszName = "SEC_E_OUT_OF_SEQUENCE - The message was not received in the correct sequence."; - break; - - case SEC_E_QOP_NOT_SUPPORTED: - pszName = "SEC_E_QOP_NOT_SUPPORTED - Neither confidentiality nor integrity are supported by the security context. 
Used with the Digest SSP."; - break; - - case SEC_I_CONTEXT_EXPIRED: - pszName = "SEC_I_CONTEXT_EXPIRED - The message sender has finished using the connection and has initiated a shutdown."; - break; - - case SEC_I_RENEGOTIATE: - pszName = "SEC_I_RENEGOTIATE - The remote party requires a new handshake sequence or the application has just initiated a shutdown."; - break; - - case SEC_E_ENCRYPT_FAILURE: - pszName = "SEC_E_ENCRYPT_FAILURE - The specified data could not be encrypted."; - break; - - case SEC_E_DECRYPT_FAILURE: - pszName = "SEC_E_DECRYPT_FAILURE - The specified data could not be decrypted."; - break; - case -1: - pszName = "Failed to load security.dll library"; - break; - - } - - return pszName; -} - -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -// UV Lib callbacks -// +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -void SecurityCredentials::Process(uv_work_t* work_req) { - // Grab the worker - Worker *worker = static_cast(work_req->data); - // Execute the worker code - worker->execute(worker); -} - -void SecurityCredentials::After(uv_work_t* work_req) { - // Grab the scope of the call from Node - v8::HandleScope scope; - - // Get the worker reference - Worker *worker = static_cast(work_req->data); - - // If we have an error - if(worker->error) { - v8::Local err = v8::Exception::Error(v8::String::New(worker->error_message)); - Local obj = err->ToObject(); - obj->Set(NODE_PSYMBOL("code"), Int32::New(worker->error_code)); - v8::Local args[2] = { err, v8::Local::New(v8::Null()) }; - // Execute the error - v8::TryCatch try_catch; - // Call the callback - worker->callback->Call(v8::Context::GetCurrent()->Global(), ARRAY_SIZE(args), args); - // If we have an exception handle it as a fatalexception - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - } else { - // // Map the data - v8::Handle result = worker->mapper(worker); - // Set up the callback with a null first - v8::Handle args[2] = { v8::Local::New(v8::Null()), result}; - // Wrap the callback function call in a TryCatch so that we can call - // node's FatalException afterwards. This makes it possible to catch - // the exception from JavaScript land using the - // process.on('uncaughtException') event. 
- v8::TryCatch try_catch; - // Call the callback - worker->callback->Call(v8::Context::GetCurrent()->Global(), ARRAY_SIZE(args), args); - // If we have an exception handle it as a fatalexception - if (try_catch.HasCaught()) { - node::FatalException(try_catch); - } - } - - // Clean up the memory - worker->callback.Dispose(); - delete worker; -} - diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.h deleted file mode 100644 index 10b3edaa3..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.h +++ /dev/null @@ -1,67 +0,0 @@ -#ifndef SECURITY_CREDENTIALS_H -#define SECURITY_CREDENTIALS_H - -#include -#include -#include - -#define SECURITY_WIN32 1 - -#include -#include -#include -#include "../worker.h" -#include - -extern "C" { - #include "../kerberos_sspi.h" -} - -// SEC_WINNT_AUTH_IDENTITY makes it unusually hard -// to compile for both Unicode and ansi, so I use this macro: -#ifdef _UNICODE -#define USTR(str) (str) -#else -#define USTR(str) ((unsigned char*)(str)) -#endif - -using namespace v8; -using namespace node; - -class SecurityCredentials : public ObjectWrap { - public: - SecurityCredentials(); - ~SecurityCredentials(); - - // Pointer to context object - SEC_WINNT_AUTH_IDENTITY m_Identity; - // credentials - CredHandle m_Credentials; - // Expiry time for ticket - TimeStamp Expiration; - - // Has instance check - static inline bool HasInstance(Handle val) { - if (!val->IsObject()) return false; - Local obj = val->ToObject(); - return constructor_template->HasInstance(obj); - }; - - // Functions available from V8 - static void Initialize(Handle target); - static Handle AquireSync(const Arguments &args); - static Handle Aquire(const Arguments &args); - - // Constructor used for creating new Long objects from C++ - static Persistent constructor_template; - - private: - // Create a new instance - static Handle New(const Arguments &args); - // Handles the uv calls - static void Process(uv_work_t* work_req); - // Called after work is done - static void After(uv_work_t* work_req); -}; - -#endif \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.js deleted file mode 100644 index 4215c9274..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/win32/wrappers/security_credentials.js +++ /dev/null @@ -1,22 +0,0 @@ -var SecurityCredentialsNative = require('../../../build/Release/kerberos').SecurityCredentials; - -// Add simple kebros helper -SecurityCredentialsNative.aquire_kerberos = function(username, password, domain, callback) { - if(typeof password == 'function') { - callback = password; - password = null; - } else if(typeof domain == 'function') { - callback = domain; - domain = null; - } - - // We are going to use the async version - if(typeof callback == 'function') { - return SecurityCredentialsNative.aquire('Kerberos', username, password, domain, callback); - } else { - return SecurityCredentialsNative.aquireSync('Kerberos', username, password, domain); - } -} - -// Export the modified class 
-exports.SecurityCredentials = SecurityCredentialsNative; \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.cc b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.cc deleted file mode 100644 index e7a472f67..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.cc +++ /dev/null @@ -1,7 +0,0 @@ -#include "worker.h" - -Worker::Worker() { -} - -Worker::~Worker() { -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.h b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.h deleted file mode 100644 index c5f86f521..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/lib/worker.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef WORKER_H_ -#define WORKER_H_ - -#include -#include -#include - -using namespace node; -using namespace v8; - -class Worker { - public: - Worker(); - virtual ~Worker(); - - // libuv's request struct. - uv_work_t request; - // Callback - v8::Persistent callback; - // // Arguments - // v8::Persistent arguments; - // Parameters - void *parameters; - // Results - void *return_value; - // Did we raise an error - bool error; - // The error message - char *error_message; - // Error code if not message - int error_code; - // Any return code - int return_code; - // Method we are going to fire - void (*execute)(Worker *worker); - Handle (*mapper)(Worker *worker); -}; - -#endif // WORKER_H_ diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/package.json b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/package.json deleted file mode 100644 index b29cfc2c8..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "kerberos", - "version": "0.0.3", - "description": "Kerberos library for Node.js", - "main": "index.js", - "scripts": { - "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", - "test": "nodeunit ./test" - }, - "repository": { - "type": "git", - "url": "https://github.com/christkv/kerberos.git" - }, - "keywords": [ - "kerberos", - "security", - "authentication" - ], - "devDependencies": { - "nodeunit": "latest" - }, - "author": { - "name": "Christian Amor Kvalheim" - }, - "license": "Apache 2.0", - "readmeFilename": "README.md", - "gitHead": "bb01d4fe322e022999aca19da564e7d9db59a8ed", - "readme": "kerberos\n========\n\nKerberos library for node.js", - "bugs": { - "url": "https://github.com/christkv/kerberos/issues" - }, - "homepage": "https://github.com/christkv/kerberos", - "_id": "kerberos@0.0.3", - "dist": { - "shasum": "53d6ed947fa0d8306a67680dffb4bd32a51e3839" - }, - "_from": "kerberos@0.0.3", - "_resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz" -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_tests.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_tests.js deleted file mode 100644 index a06c5fdfe..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_tests.js +++ /dev/null @@ -1,34 +0,0 @@ -exports.setUp = function(callback) 
{ - callback(); -} - -exports.tearDown = function(callback) { - callback(); -} - -exports['Simple initialize of Kerberos object'] = function(test) { - var Kerberos = require('../lib/kerberos.js').Kerberos; - var kerberos = new Kerberos(); - // console.dir(kerberos) - - // Initiate kerberos client - kerberos.authGSSClientInit('mongodb@kdc.10gen.me', Kerberos.GSS_C_MUTUAL_FLAG, function(err, context) { - console.log("===================================== authGSSClientInit") - test.equal(null, err); - test.ok(context != null && typeof context == 'object'); - // console.log("===================================== authGSSClientInit") - console.dir(err) - console.dir(context) - // console.dir(typeof result) - - // Perform the first step - kerberos.authGSSClientStep(context, function(err, result) { - console.log("===================================== authGSSClientStep") - console.dir(err) - console.dir(result) - console.dir(context) - - test.done(); - }); - }); -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_win32_test.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_win32_test.js deleted file mode 100644 index d2f704638..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/kerberos_win32_test.js +++ /dev/null @@ -1,19 +0,0 @@ -exports.setUp = function(callback) { - callback(); -} - -exports.tearDown = function(callback) { - callback(); -} - -exports['Simple initialize of Kerberos win32 object'] = function(test) { - var KerberosNative = require('../build/Release/kerberos').Kerberos; - // console.dir(KerberosNative) - var kerberos = new KerberosNative(); - console.log("=========================================== 0") - console.dir(kerberos.acquireAlternateCredentials("dev1@10GEN.ME", "a")); - console.log("=========================================== 1") - console.dir(kerberos.prepareOutboundPackage("mongodb/kdc.10gen.com")); - console.log("=========================================== 2") - test.done(); -} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js deleted file mode 100644 index 3531b6bc2..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_descriptor_tests.js +++ /dev/null @@ -1,41 +0,0 @@ -exports.setUp = function(callback) { - callback(); -} - -exports.tearDown = function(callback) { - callback(); -} - -exports['Initialize a security Buffer Descriptor'] = function(test) { - var SecurityBufferDescriptor = require('../../lib/sspi.js').SecurityBufferDescriptor - SecurityBuffer = require('../../lib/sspi.js').SecurityBuffer; - - // Create descriptor with single Buffer - var securityDescriptor = new SecurityBufferDescriptor(100); - try { - // Fail to work due to no valid Security Buffer - securityDescriptor = new SecurityBufferDescriptor(["hello"]); - test.ok(false); - } catch(err){} - - // Should Correctly construct SecurityBuffer - var buffer = new SecurityBuffer(SecurityBuffer.DATA, 100); - securityDescriptor = new SecurityBufferDescriptor([buffer]); - // Should correctly return a buffer - var result = securityDescriptor.toBuffer(); - test.equal(100, result.length); - - // Should Correctly 
construct SecurityBuffer - var buffer = new SecurityBuffer(SecurityBuffer.DATA, new Buffer("hello world")); - securityDescriptor = new SecurityBufferDescriptor([buffer]); - var result = securityDescriptor.toBuffer(); - test.equal("hello world", result.toString()); - - // Test passing in more than one Buffer - var buffer = new SecurityBuffer(SecurityBuffer.DATA, new Buffer("hello world")); - var buffer2 = new SecurityBuffer(SecurityBuffer.STREAM, new Buffer("adam and eve")); - securityDescriptor = new SecurityBufferDescriptor([buffer, buffer2]); - var result = securityDescriptor.toBuffer(); - test.equal("hello worldadam and eve", result.toString()); - test.done(); -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_tests.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_tests.js deleted file mode 100644 index b52b9598b..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_buffer_tests.js +++ /dev/null @@ -1,22 +0,0 @@ -exports.setUp = function(callback) { - callback(); -} - -exports.tearDown = function(callback) { - callback(); -} - -exports['Initialize a security Buffer'] = function(test) { - var SecurityBuffer = require('../../lib/sspi.js').SecurityBuffer; - // Create empty buffer - var securityBuffer = new SecurityBuffer(SecurityBuffer.DATA, 100); - var buffer = securityBuffer.toBuffer(); - test.equal(100, buffer.length); - - // Access data passed in - var allocated_buffer = new Buffer(256); - securityBuffer = new SecurityBuffer(SecurityBuffer.DATA, allocated_buffer); - buffer = securityBuffer.toBuffer(); - test.deepEqual(allocated_buffer, buffer); - test.done(); -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_credentials_tests.js b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_credentials_tests.js deleted file mode 100644 index 775818007..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/kerberos/test/win32/security_credentials_tests.js +++ /dev/null @@ -1,55 +0,0 @@ -exports.setUp = function(callback) { - callback(); -} - -exports.tearDown = function(callback) { - callback(); -} - -exports['Initialize a set of security credentials'] = function(test) { - var SecurityCredentials = require('../../lib/sspi.js').SecurityCredentials; - - // Aquire some credentials - try { - var credentials = SecurityCredentials.aquire('Kerberos', 'dev1@10GEN.ME', 'a'); - } catch(err) { - console.dir(err) - test.ok(false); - } - - - - // console.dir(SecurityCredentials); - - // var SecurityBufferDescriptor = require('../../lib/sspi.js').SecurityBufferDescriptor - // SecurityBuffer = require('../../lib/sspi.js').SecurityBuffer; - - // // Create descriptor with single Buffer - // var securityDescriptor = new SecurityBufferDescriptor(100); - // try { - // // Fail to work due to no valid Security Buffer - // securityDescriptor = new SecurityBufferDescriptor(["hello"]); - // test.ok(false); - // } catch(err){} - - // // Should Correctly construct SecurityBuffer - // var buffer = new SecurityBuffer(SecurityBuffer.DATA, 100); - // securityDescriptor = new SecurityBufferDescriptor([buffer]); - // // Should correctly return a buffer - // var result = securityDescriptor.toBuffer(); - // 
test.equal(100, result.length); - - // // Should Correctly construct SecurityBuffer - // var buffer = new SecurityBuffer(SecurityBuffer.DATA, new Buffer("hello world")); - // securityDescriptor = new SecurityBufferDescriptor([buffer]); - // var result = securityDescriptor.toBuffer(); - // test.equal("hello world", result.toString()); - - // // Test passing in more than one Buffer - // var buffer = new SecurityBuffer(SecurityBuffer.DATA, new Buffer("hello world")); - // var buffer2 = new SecurityBuffer(SecurityBuffer.STREAM, new Buffer("adam and eve")); - // securityDescriptor = new SecurityBufferDescriptor([buffer, buffer2]); - // var result = securityDescriptor.toBuffer(); - // test.equal("hello worldadam and eve", result.toString()); - test.done(); -} \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/package.json b/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/package.json deleted file mode 100644 index e8304ffec..000000000 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/package.json +++ /dev/null @@ -1,228 +0,0 @@ -{ - "name": "mongodb", - "description": "A node.js driver for MongoDB", - "keywords": [ - "mongodb", - "mongo", - "driver", - "db" - ], - "version": "1.3.19", - "author": { - "name": "Christian Amor Kvalheim", - "email": "christkv@gmail.com" - }, - "contributors": [ - { - "name": "Aaron Heckmann" - }, - { - "name": "Christoph Pojer" - }, - { - "name": "Pau Ramon Revilla" - }, - { - "name": "Nathan White" - }, - { - "name": "Emmerman" - }, - { - "name": "Seth LaForge" - }, - { - "name": "Boris Filipov" - }, - { - "name": "Stefan Schärmeli" - }, - { - "name": "Tedde Lundgren" - }, - { - "name": "renctan" - }, - { - "name": "Sergey Ukustov" - }, - { - "name": "Ciaran Jessup" - }, - { - "name": "kuno" - }, - { - "name": "srimonti" - }, - { - "name": "Erik Abele" - }, - { - "name": "Pratik Daga" - }, - { - "name": "Slobodan Utvic" - }, - { - "name": "Kristina Chodorow" - }, - { - "name": "Yonathan Randolph" - }, - { - "name": "Brian Noguchi" - }, - { - "name": "Sam Epstein" - }, - { - "name": "James Harrison Fisher" - }, - { - "name": "Vladimir Dronnikov" - }, - { - "name": "Ben Hockey" - }, - { - "name": "Henrik Johansson" - }, - { - "name": "Simon Weare" - }, - { - "name": "Alex Gorbatchev" - }, - { - "name": "Shimon Doodkin" - }, - { - "name": "Kyle Mueller" - }, - { - "name": "Eran Hammer-Lahav" - }, - { - "name": "Marcin Ciszak" - }, - { - "name": "François de Metz" - }, - { - "name": "Vinay Pulim" - }, - { - "name": "nstielau" - }, - { - "name": "Adam Wiggins" - }, - { - "name": "entrinzikyl" - }, - { - "name": "Jeremy Selier" - }, - { - "name": "Ian Millington" - }, - { - "name": "Public Keating" - }, - { - "name": "andrewjstone" - }, - { - "name": "Christopher Stott" - }, - { - "name": "Corey Jewett" - }, - { - "name": "brettkiefer" - }, - { - "name": "Rob Holland" - }, - { - "name": "Senmiao Liu" - }, - { - "name": "heroic" - }, - { - "name": "gitfy" - }, - { - "name": "Andrew Stone" - }, - { - "name": "John Le Drew" - }, - { - "name": "Lucasfilm Singapore" - }, - { - "name": "Roman Shtylman" - }, - { - "name": "Matt Self" - } - ], - "repository": { - "type": "git", - "url": "http://github.com/mongodb/node-mongodb-native.git" - }, - "bugs": { - "url": "http://github.com/mongodb/node-mongodb-native/issues" - }, - "dependencies": { - "bson": "0.2.2", - "kerberos": "0.0.3" - }, - "devDependencies": { - "dox": "0.2.0", - "uglify-js": "1.2.5", - "ejs": "0.6.1", - "request": "2.12.0", - 
"nodeunit": "0.7.4", - "markdown": "0.3.1", - "gleak": "0.2.3", - "step": "0.0.5", - "async": "0.1.22", - "integra": "latest", - "optimist": "latest" - }, - "optionalDependencies": { - "kerberos": "0.0.3" - }, - "config": { - "native": false - }, - "main": "./lib/mongodb/index", - "homepage": "http://mongodb.github.com/node-mongodb-native/", - "directories": { - "lib": "./lib/mongodb" - }, - "engines": { - "node": ">=0.6.19" - }, - "scripts": { - "test": "make test_functional" - }, - "licenses": [ - { - "type": "Apache License, Version 2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0" - } - ], - "readme": "Up to date documentation\n========================\n\n[Documentation](http://mongodb.github.com/node-mongodb-native/)\n\nInstall\n=======\n\nTo install the most recent release from npm, run:\n\n npm install mongodb\n\nThat may give you a warning telling you that bugs['web'] should be bugs['url'], it would be safe to ignore it (this has been fixed in the development version)\n\nTo install the latest from the repository, run::\n\n npm install path/to/node-mongodb-native\n\nCommunity\n=========\nCheck out the google group [node-mongodb-native](http://groups.google.com/group/node-mongodb-native) for questions/answers from users of the driver.\n\nLive Examples\n============\n\n\nIntroduction\n============\n\nThis is a node.js driver for MongoDB. It's a port (or close to a port) of the library for ruby at http://github.com/mongodb/mongo-ruby-driver/.\n\nA simple example of inserting a document.\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n var collection = db.collection('test_insert');\n collection.insert({a:2}, function(err, docs) {\n \n collection.count(function(err, count) {\n console.log(format(\"count = %s\", count));\n });\n\n // Locate all the entries using find\n collection.find().toArray(function(err, results) {\n console.dir(results);\n // Let's close the db\n db.close();\n }); \n });\n })\n```\n\nData types\n==========\n\nTo store and retrieve the non-JSON MongoDb primitives ([ObjectID](http://www.mongodb.org/display/DOCS/Object+IDs), Long, Binary, [Timestamp](http://www.mongodb.org/display/DOCS/Timestamp+data+type), [DBRef](http://www.mongodb.org/display/DOCS/Database+References#DatabaseReferences-DBRef), Code).\n\nIn particular, every document has a unique `_id` which can be almost any type, and by default a 12-byte ObjectID is created. ObjectIDs can be represented as 24-digit hexadecimal strings, but you must convert the string back into an ObjectID before you can use it in the database. For example:\n\n```javascript\n // Get the objectID type\n var ObjectID = require('mongodb').ObjectID;\n\n var idString = '4e4e1638c85e808431000003';\n collection.findOne({_id: new ObjectID(idString)}, console.log) // ok\n collection.findOne({_id: idString}, console.log) // wrong! 
callback gets undefined\n```\n\nHere are the constructors the non-Javascript BSON primitive types:\n\n```javascript\n // Fetch the library\n var mongo = require('mongodb');\n // Create new instances of BSON types\n new mongo.Long(numberString)\n new mongo.ObjectID(hexString)\n new mongo.Timestamp() // the actual unique number is generated on insert.\n new mongo.DBRef(collectionName, id, dbName)\n new mongo.Binary(buffer) // takes a string or Buffer\n new mongo.Code(code, [context])\n new mongo.Symbol(string)\n new mongo.MinKey()\n new mongo.MaxKey()\n new mongo.Double(number)\t// Force double storage\n```\n\nThe C/C++ bson parser/serializer\n--------------------------------\n\nIf you are running a version of this library has the C/C++ parser compiled, to enable the driver to use the C/C++ bson parser pass it the option native_parser:true like below\n\n```javascript\n // using native_parser:\n MongoClient.connect('mongodb://127.0.0.1:27017/test'\n , {db: {native_parser: true}}, function(err, db) {})\n```\n\nThe C++ parser uses the js objects both for serialization and deserialization.\n\nGitHub information\n==================\n\nThe source code is available at http://github.com/mongodb/node-mongodb-native.\nYou can either clone the repository or download a tarball of the latest release.\n\nOnce you have the source you can test the driver by running\n\n $ make test\n\nin the main directory. You will need to have a mongo instance running on localhost for the integration tests to pass.\n\nExamples\n========\n\nFor examples look in the examples/ directory. You can execute the examples using node.\n\n $ cd examples\n $ node queries.js\n\nGridStore\n=========\n\nThe GridStore class allows for storage of binary files in mongoDB using the mongoDB defined files and chunks collection definition.\n\nFor more information have a look at [Gridstore](https://github.com/mongodb/node-mongodb-native/blob/master/docs/gridfs.md)\n\nReplicasets\n===========\nFor more information about how to connect to a replicaset have a look at the extensive documentation [Documentation](http://mongodb.github.com/node-mongodb-native/)\n\nPrimary Key Factories\n---------------------\n\nDefining your own primary key factory allows you to generate your own series of id's\n(this could f.ex be to use something like ISBN numbers). 
The generated the id needs to be a 12 byte long \"string\".\n\nSimple example below\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n // Custom factory (need to provide a 12 byte array);\n CustomPKFactory = function() {}\n CustomPKFactory.prototype = new Object();\n CustomPKFactory.createPk = function() {\n return new ObjectID(\"aaaaaaaaaaaa\");\n }\n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n db.dropDatabase(function(err, done) {\n \n db.createCollection('test_custom_key', function(err, collection) {\n \n collection.insert({'a':1}, function(err, docs) {\n \n collection.find({'_id':new ObjectID(\"aaaaaaaaaaaa\")}).toArray(function(err, items) {\n console.dir(items);\n // Let's close the db\n db.close();\n });\n });\n });\n });\n });\n```\n\nDocumentation\n=============\n\nIf this document doesn't answer your questions, see the source of\n[Collection](https://github.com/mongodb/node-mongodb-native/blob/master/lib/mongodb/collection.js)\nor [Cursor](https://github.com/mongodb/node-mongodb-native/blob/master/lib/mongodb/cursor.js),\nor the documentation at MongoDB for query and update formats.\n\nFind\n----\n\nThe find method is actually a factory method to create\nCursor objects. A Cursor lazily uses the connection the first time\nyou call `nextObject`, `each`, or `toArray`.\n\nThe basic operation on a cursor is the `nextObject` method\nthat fetches the next matching document from the database. The convenience\nmethods `each` and `toArray` call `nextObject` until the cursor is exhausted.\n\nSignatures:\n\n```javascript\n var cursor = collection.find(query, [fields], options);\n cursor.sort(fields).limit(n).skip(m).\n\n cursor.nextObject(function(err, doc) {});\n cursor.each(function(err, doc) {});\n cursor.toArray(function(err, docs) {});\n\n cursor.rewind() // reset the cursor to its initial state.\n```\n\nUseful chainable methods of cursor. These can optionally be options of `find` instead of method calls:\n\n * `.limit(n).skip(m)` to control paging.\n * `.sort(fields)` Order by the given fields. There are several equivalent syntaxes:\n * `.sort({field1: -1, field2: 1})` descending by field1, then ascending by field2.\n * `.sort([['field1', 'desc'], ['field2', 'asc']])` same as above\n * `.sort([['field1', 'desc'], 'field2'])` same as above\n * `.sort('field1')` ascending by field1\n\nOther options of `find`:\n\n* `fields` the fields to fetch (to avoid transferring the entire document)\n* `tailable` if true, makes the cursor [tailable](http://www.mongodb.org/display/DOCS/Tailable+Cursors).\n* `batchSize` The number of the subset of results to request the database\nto return for every request. This should initially be greater than 1 otherwise\nthe database will automatically close the cursor. The batch size can be set to 1\nwith `batchSize(n, function(err){})` after performing the initial query to the database.\n* `hint` See [Optimization: hint](http://www.mongodb.org/display/DOCS/Optimization#Optimization-Hint).\n* `explain` turns this into an explain query. You can also call\n`explain()` on any cursor to fetch the explanation.\n* `snapshot` prevents documents that are updated while the query is active\nfrom being returned multiple times. 
See more\n[details about query snapshots](http://www.mongodb.org/display/DOCS/How+to+do+Snapshotted+Queries+in+the+Mongo+Database).\n* `timeout` if false, asks MongoDb not to time out this cursor after an\ninactivity period.\n\n\nFor information on how to create queries, see the\n[MongoDB section on querying](http://www.mongodb.org/display/DOCS/Querying).\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n var collection = db\n .collection('test')\n .find({})\n .limit(10)\n .toArray(function(err, docs) {\n console.dir(docs);\n });\n });\n```\n\nInsert\n------\n\nSignature:\n\n```javascript\n collection.insert(docs, options, [callback]);\n```\n\nwhere `docs` can be a single document or an array of documents.\n\nUseful options:\n\n* `safe:true` Should always set if you have a callback.\n\nSee also: [MongoDB docs for insert](http://www.mongodb.org/display/DOCS/Inserting).\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n \n db.collection('test').insert({hello: 'world'}, {w:1}, function(err, objects) {\n if (err) console.warn(err.message);\n if (err && err.message.indexOf('E11000 ') !== -1) {\n // this _id was already inserted in the database\n }\n });\n });\n```\n\nNote that there's no reason to pass a callback to the insert or update commands\nunless you use the `safe:true` option. If you don't specify `safe:true`, then\nyour callback will be called immediately.\n\nUpdate; update and insert (upsert)\n----------------------------------\n\nThe update operation will update the first document that matches your query\n(or all documents that match if you use `multi:true`).\nIf `safe:true`, `upsert` is not set, and no documents match, your callback will return 0 documents updated.\n\nSee the [MongoDB docs](http://www.mongodb.org/display/DOCS/Updating) for\nthe modifier (`$inc`, `$set`, `$push`, etc.) formats.\n\nSignature:\n\n```javascript\n collection.update(criteria, objNew, options, [callback]);\n```\n\nUseful options:\n\n* `safe:true` Should always set if you have a callback.\n* `multi:true` If set, all matching documents are updated, not just the first.\n* `upsert:true` Atomically inserts the document if no documents matched.\n\nExample for `update`:\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n\n db.collection('test').update({hi: 'here'}, {$set: {hi: 'there'}}, {w:1}, function(err) {\n if (err) console.warn(err.message);\n else console.log('successfully updated');\n });\n });\n```\n\nFind and modify\n---------------\n\n`findAndModify` is like `update`, but it also gives the updated document to\nyour callback. But there are a few key differences between findAndModify and\nupdate:\n\n 1. The signatures differ.\n 2. You can only findAndModify a single item, not multiple items.\n\nSignature:\n\n```javascript\n collection.findAndModify(query, sort, update, options, callback)\n```\n\nThe sort parameter is used to specify which object to operate on, if more than\none document matches. 
It takes the same format as the cursor sort (see\nConnection.find above).\n\nSee the\n[MongoDB docs for findAndModify](http://www.mongodb.org/display/DOCS/findAndModify+Command)\nfor more details.\n\nUseful options:\n\n* `remove:true` set to a true to remove the object before returning\n* `new:true` set to true if you want to return the modified object rather than the original. Ignored for remove.\n* `upsert:true` Atomically inserts the document if no documents matched.\n\nExample for `findAndModify`:\n\n```javascript\n var MongoClient = require('mongodb').MongoClient\n , format = require('util').format; \n\n MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {\n if(err) throw err;\n db.collection('test').findAndModify({hello: 'world'}, [['_id','asc']], {$set: {hi: 'there'}}, {}, function(err, object) {\n if (err) console.warn(err.message);\n else console.dir(object); // undefined if no matching object exists.\n });\n });\n```\n\nSave\n----\n\nThe `save` method is a shorthand for upsert if the document contains an\n`_id`, or an insert if there is no `_id`.\n\nSponsors\n========\nJust as Felix Geisendörfer I'm also working on the driver for my own startup and this driver is a big project that also benefits other companies who are using MongoDB.\n\nIf your company could benefit from a even better-engineered node.js mongodb driver I would appreciate any type of sponsorship you may be able to provide. All the sponsors will get a lifetime display in this readme, priority support and help on problems and votes on the roadmap decisions for the driver. If you are interested contact me on [christkv AT g m a i l.com](mailto:christkv@gmail.com) for details.\n\nAnd I'm very thankful for code contributions. If you are interested in working on features please contact me so we can discuss API design and testing.\n\nRelease Notes\n=============\n\nSee HISTORY\n\nCredits\n=======\n\n1. [10gen](http://github.com/mongodb/mongo-ruby-driver/)\n2. [Google Closure Library](http://code.google.com/closure/library/)\n3. 
[Jonas Raoni Soares Silva](http://jsfromhell.com/classes/binary-parser)\n\nContributors\n============\n\nAaron Heckmann, Christoph Pojer, Pau Ramon Revilla, Nathan White, Emmerman, Seth LaForge, Boris Filipov, Stefan Schärmeli, Tedde Lundgren, renctan, Sergey Ukustov, Ciaran Jessup, kuno, srimonti, Erik Abele, Pratik Daga, Slobodan Utvic, Kristina Chodorow, Yonathan Randolph, Brian Noguchi, Sam Epstein, James Harrison Fisher, Vladimir Dronnikov, Ben Hockey, Henrik Johansson, Simon Weare, Alex Gorbatchev, Shimon Doodkin, Kyle Mueller, Eran Hammer-Lahav, Marcin Ciszak, François de Metz, Vinay Pulim, nstielau, Adam Wiggins, entrinzikyl, Jeremy Selier, Ian Millington, Public Keating, andrewjstone, Christopher Stott, Corey Jewett, brettkiefer, Rob Holland, Senmiao Liu, heroic, gitfy\n\nLicense\n=======\n\n Copyright 2009 - 2012 Christian Amor Kvalheim.\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n", - "readmeFilename": "Readme.md", - "_id": "mongodb@1.3.19", - "dist": { - "shasum": "d0a396154c89e1a7137f3c64defee4dae7fa8abc" - }, - "_from": "mongodb@1.3.19", - "_resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz" -} diff --git a/node_modules/mongoose/node_modules/mquery/package.json b/node_modules/mongoose/node_modules/mquery/package.json deleted file mode 100644 index c694fdb25..000000000 --- a/node_modules/mongoose/node_modules/mquery/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "mquery", - "version": "0.4.1", - "description": "Expressive query building for MongoDB", - "main": "index.js", - "scripts": { - "test": "make test" - }, - "repository": { - "type": "git", - "url": "git://github.com/aheckmann/mquery.git" - }, - "keywords": [ - "mongodb", - "query", - "builder" - ], - "dependencies": { - "sliced": "0.0.5", - "debug": "0.7.0", - "mongodb": "1.3.19", - "regexp-clone": "0.0.1" - }, - "devDependencies": { - "mocha": "1.9.x" - }, - "bugs": { - "url": "https://github.com/aheckmann/mquery/issues/new" - }, - "author": { - "name": "Aaron Heckmann", - "email": "aaron.heckmann+github@gmail.com" - }, - "license": "MIT", - "homepage": "https://github.com/aheckmann/mquery/", - "readme": "#mquery\n===========\n\n`mquery` is a fluent mongodb query builder designed to run in multiple environments. 
As of v0.1, `mquery` runs on `Node.js` only with support for the MongoDB shell and browser environments planned for upcoming releases.\n\n##Features\n\n - fluent query builder api\n - custom base query support\n - MongoDB 2.4 geoJSON support\n - method + option combinations validation\n - node.js driver compatibility\n - environment detection\n - [debug](https://github.com/visionmedia/debug) support\n - separated collection implementations for maximum flexibility\n\n[![Build Status](https://travis-ci.org/aheckmann/mquery.png)](https://travis-ci.org/aheckmann/mquery)\n\n##Use\n\n```js\nrequire('mongodb').connect(uri, function (err, db) {\n if (err) return handleError(err);\n\n // get a collection\n var collection = db.collection('artists');\n\n // pass it to the constructor\n mquery(collection).find({..}, callback);\n\n // or pass it to the collection method\n mquery().find({..}).collection(collection).exec(callback)\n\n // or better yet, create a custom query constructor that has it always set\n var Artist = mquery(collection).toConstructor();\n Artist().find(..).where(..).exec(callback)\n})\n```\n\n`mquery` requires a collection object to work with. In the example above we just pass the collection object created using the official [MongoDB driver](https://github.com/mongodb/node-mongodb-native).\n\n\n##Fluent API\n\n###find()\n\nDeclares this query a _find_ query. Optionally pass a match clause and / or callback. If a callback is passed the query is executed.\n\n```js\nmquery().find()\nmquery().find(match)\nmquery().find(callback)\nmquery().find(match, function (err, docs) {\n assert(Array.isArray(docs));\n})\n```\n\n###findOne()\n\nDeclares this query a _findOne_ query. Optionally pass a match clause and / or callback. If a callback is passed the query is executed.\n\n```js\nmquery().findOne()\nmquery().findOne(match)\nmquery().findOne(callback)\nmquery().findOne(match, function (err, doc) {\n if (doc) {\n // the document may not be found\n console.log(doc);\n }\n})\n```\n\n###count()\n\nDeclares this query a _count_ query. Optionally pass a match clause and / or callback. If a callback is passed the query is executed.\n\n```js\nmquery().count()\nmquery().count(match)\nmquery().count(callback)\nmquery().count(match, function (err, number){\n console.log('we found %d matching documents', number);\n})\n```\n\n###remove()\n\nDeclares this query a _remove_ query. Optionally pass a match clause and / or callback. If a callback is passed the query is executed.\n\n```js\nmquery().remove()\nmquery().remove(match)\nmquery().remove(callback)\nmquery().remove(match, function (err){})\n```\n\n###update()\n\nDeclares this query an _update_ query. Optionally pass an update document, match clause, options or callback. If a callback is passed, the query is executed. To force execution without passing a callback, run `update(true)`.\n\n```js\nmquery().update()\nmquery().update(match, updateDocument)\nmquery().update(match, updateDocument, options)\n\n// the following all execute the command\nmquery().update(callback)\nmquery().update({$set: updateDocument, callback)\nmquery().update(match, updateDocument, callback)\nmquery().update(match, updateDocument, options, function (err, result){})\nmquery().update(true) // executes (unsafe write)\n```\n\n#####the update document\n\nAll paths passed that are not `$atomic` operations will become `$set` ops. 
For example:\n\n```js\nmquery(collection).where({ _id: id }).update({ title: 'words' }, callback)\n```\n\nbecomes\n\n```js\ncollection.update({ _id: id }, { $set: { title: 'words' }}, callback)\n```\n\nThis behavior can be overridden using the `overwrite` option (see below).\n\n#####options\n\nOptions are passed to the `setOptions()` method.\n\n- overwrite\n\nPassing an empty object `{ }` as the update document will result in a no-op unless the `overwrite` option is passed. Without the `overwrite` option, the update operation will be ignored and the callback executed without sending the command to MongoDB to prevent accidently overwritting documents in the collection.\n\n```js\nvar q = mquery(collection).where({ _id: id }).setOptions({ overwrite: true });\nq.update({ }, callback); // overwrite with an empty doc\n```\n\nThe `overwrite` option isn't just for empty objects, it also provides a means to override the default `$set` conversion and send the update document as is.\n\n```js\n// create a base query\nvar base = mquery({ _id: 108 }).collection(collection).toConstructor();\n\nbase().findOne(function (err, doc) {\n console.log(doc); // { _id: 108, name: 'cajon' })\n\n base().setOptions({ overwrite: true }).update({ changed: true }, function (err) {\n base.findOne(function (err, doc) {\n console.log(doc); // { _id: 108, changed: true }) - the doc was overwritten\n });\n });\n})\n```\n\n- multi\n\nUpdates only modify a single document by default. To update multiple documents, set the `multi` option to `true`.\n\n```js\nmquery()\n .collection(coll)\n .update({ name: /^match/ }, { $addToSet: { arr: 4 }}, { multi: true }, callback)\n\n// another way of doing it\nmquery({ name: /^match/ })\n .collection(coll)\n .setOptions({ multi: true })\n .update({ $addToSet: { arr: 4 }}, callback)\n\n// update multiple documents with an empty doc\nvar q = mquery(collection).where({ name: /^match/ });\nq.setOptions({ multi: true, overwrite: true })\nq.update({ });\nq.update(function (err, result) {\n console.log(arguments);\n});\n```\n\n###findOneAndUpdate()\n\nDeclares this query a _findAndModify_ with update query. Optionally pass a match clause, update document, options, or callback. If a callback is passed, the query is executed.\n\nWhen executed, the first matching document (if found) is modified according to the update document and passed back to the callback.\n\n#####options\n\nOptions are passed to the `setOptions()` method.\n\n- `new`: boolean - true to return the modified document rather than the original. defaults to true\n- `upsert`: boolean - creates the object if it doesn't exist. defaults to false\n- `sort`: if multiple docs are found by the match condition, sets the sort order to choose which doc to update\n\n```js\nquery.findOneAndUpdate()\nquery.findOneAndUpdate(updateDocument)\nquery.findOneAndUpdate(match, updateDocument)\nquery.findOneAndUpdate(match, updateDocument, options)\n\n// the following all execute the command\nquery.findOneAndUpdate(callback)\nquery.findOneAndUpdate(updateDocument, callback)\nquery.findOneAndUpdate(match, updateDocument, callback)\nquery.findOneAndUpdate(match, updateDocument, options, function (err, doc) {\n if (doc) {\n // the document may not be found\n console.log(doc);\n }\n})\n ```\n\n###findOneAndRemove()\n\nDeclares this query a _findAndModify_ with remove query. Optionally pass a match clause, options, or callback. 
If a callback is passed, the query is executed.\n\nWhen executed, the first matching document (if found) is modified according to the update document, removed from the collection and passed to the callback.\n\n#####options\n\nOptions are passed to the `setOptions()` method.\n\n- `sort`: if multiple docs are found by the condition, sets the sort order to choose which doc to modify and remove\n\n```js\nA.where().findOneAndRemove()\nA.where().findOneAndRemove(match)\nA.where().findOneAndRemove(match, options)\n\n// the following all execute the command\nA.where().findOneAndRemove(callback)\nA.where().findOneAndRemove(match, callback)\nA.where().findOneAndRemove(match, options, function (err, doc) {\n if (doc) {\n // the document may not be found\n console.log(doc);\n }\n})\n ```\n\n###distinct()\n\nDeclares this query a _distinct_ query. Optionally pass the distinct field, a match clause or callback. If a callback is passed the query is executed.\n\n```js\nmquery().distinct()\nmquery().distinct(match)\nmquery().distinct(match, field)\nmquery().distinct(field)\n\n// the following all execute the command\nmquery().distinct(callback)\nmquery().distinct(field, callback)\nmquery().distinct(match, callback)\nmquery().distinct(match, field, function (err, result) {\n console.log(result);\n})\n```\n\n###exec()\n\nExecutes the query.\n\n```js\nmquery().findOne().where('route').intersects(polygon).exec(function (err, docs){})\n```\n\n-------------\n\n###all()\n\nSpecifies an `$all` query condition\n\n```js\nmquery().where('permission').all(['read', 'write'])\n```\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/all/)\n\n###and()\n\nSpecifies arguments for an `$and` condition\n\n```js\nmquery().and([{ color: 'green' }, { status: 'ok' }])\n```\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/and/)\n\n###box()\n\nSpecifies a `$box` condition\n\n```js\nvar lowerLeft = [40.73083, -73.99756]\nvar upperRight= [40.741404, -73.988135]\n\nmquery().where('location').within().box(lowerLeft, upperRight)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/box/)\n\n###circle()\n\nSpecifies a `$center` or `$centerSphere` condition.\n\n```js\nvar area = { center: [50, 50], radius: 10, unique: true }\nquery.where('loc').within().circle(area)\nquery.circle('loc', area);\n\n// for spherical calculations\nvar area = { center: [50, 50], radius: 10, unique: true, spherical: true }\nquery.where('loc').within().circle(area)\nquery.circle('loc', area);\n```\n\n- [MongoDB Documentation - center](http://docs.mongodb.org/manual/reference/operator/center/)\n- [MongoDB Documentation - centerSphere](http://docs.mongodb.org/manual/reference/operator/centerSphere/)\n\n###elemMatch()\n\nSpecifies an `$elemMatch` condition\n\n```js\nquery.where('comment').elemMatch({ author: 'autobot', votes: {$gte: 5}})\n\nquery.elemMatch('comment', function (elem) {\n elem.where('author').equals('autobot');\n elem.where('votes').gte(5);\n})\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/elemMatch/)\n\n###equals()\n\nSpecifies the complementary comparison value for the path specified with `where()`.\n\n```js\nmquery().where('age').equals(49);\n\n// is the same as\n\nmquery().where({ 'age': 49 });\n```\n\n###exists()\n\nSpecifies an `$exists` condition\n\n```js\n// { name: { $exists: true }}\nmquery().where('name').exists()\nmquery().where('name').exists(true)\nmquery().exists('name')\n\n// { name: { $exists: false 
}}\nmquery().where('name').exists(false);\nmquery().exists('name', false);\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/exists/)\n\n###geometry()\n\nSpecifies a `$geometry` condition\n\n```js\nvar polyA = [[[ 10, 20 ], [ 10, 40 ], [ 30, 40 ], [ 30, 20 ]]]\nquery.where('loc').within().geometry({ type: 'Polygon', coordinates: polyA })\n\n// or\nvar polyB = [[ 0, 0 ], [ 1, 1 ]]\nquery.where('loc').within().geometry({ type: 'LineString', coordinates: polyB })\n\n// or\nvar polyC = [ 0, 0 ]\nquery.where('loc').within().geometry({ type: 'Point', coordinates: polyC })\n\n// or\nquery.where('loc').intersects().geometry({ type: 'Point', coordinates: polyC })\n\n// or\nquery.where('loc').near().geometry({ type: 'Point', coordinates: [3,5] })\n```\n\n`geometry()` **must** come after `intersects()`, `within()`, or `near()`.\n\nThe `object` argument must contain `type` and `coordinates` properties.\n\n- type `String`\n- coordinates `Array`\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/geometry/)\n\n###gt()\n\nSpecifies a `$gt` query condition.\n\n```js\nmquery().where('clicks').gt(999)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/gt/)\n\n###gte()\n\nSpecifies a `$gte` query condition.\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/gte/)\n\n```js\nmquery().where('clicks').gte(1000)\n```\n\n###in()\n\nSpecifies an `$in` query condition.\n\n```js\nmquery().where('author_id').in([3, 48901, 761])\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/in/)\n\n###intersects()\n\nDeclares an `$geoIntersects` query for `geometry()`.\n\n```js\nquery.where('path').intersects().geometry({\n type: 'LineString'\n , coordinates: [[180.0, 11.0], [180, 9.0]]\n})\n\n// geometry arguments are supported\nquery.where('path').intersects({\n type: 'LineString'\n , coordinates: [[180.0, 11.0], [180, 9.0]]\n})\n```\n\n**Must** be used after `where()`.\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/geoIntersects/)\n\n###lt()\n\nSpecifies a `$lt` query condition.\n\n```js\nmquery().where('clicks').lt(50)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/lt/)\n\n###lte()\n\nSpecifies a `$lte` query condition.\n\n```js\nmquery().where('clicks').lte(49)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/lte/)\n\n###maxDistance()\n\nSpecifies a `$maxDistance` query condition.\n\n```js\nmquery().where('location').near({ center: [139, 74.3] }).maxDistance(5)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/maxDistance/)\n\n###mod()\n\nSpecifies a `$mod` condition\n\n```js\nmquery().where('count').mod(2, 0)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/mod/)\n\n###ne()\n\nSpecifies a `$ne` query condition.\n\n```js\nmquery().where('status').ne('ok')\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/ne/)\n\n###nin()\n\nSpecifies an `$nin` query condition.\n\n```js\nmquery().where('author_id').nin([3, 48901, 761])\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/nin/)\n\n###nor()\n\nSpecifies arguments for an `$nor` condition.\n\n```js\nmquery().nor([{ color: 'green' }, { status: 'ok' }])\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/nor/)\n\n###near()\n\nSpecifies arguments for a `$near` or `$nearSphere` 
condition.\n\nThese operators return documents sorted by distance.\n\n####Example\n\n```js\nquery.where('loc').near({ center: [10, 10] });\nquery.where('loc').near({ center: [10, 10], maxDistance: 5 });\nquery.near('loc', { center: [10, 10], maxDistance: 5 });\n\n// GeoJSON\nquery.where('loc').near({ center: { type: 'Point', coordinates: [10, 10] }});\nquery.where('loc').near({ center: { type: 'Point', coordinates: [10, 10] }, maxDistance: 5, spherical: true });\nquery.where('loc').near().geometry({ type: 'Point', coordinates: [10, 10] });\n\n// For a $nearSphere condition, pass the `spherical` option.\nquery.near({ center: [10, 10], maxDistance: 5, spherical: true });\n```\n\n[MongoDB Documentation](http://www.mongodb.org/display/DOCS/Geospatial+Indexing)\n\n###or()\n\nSpecifies arguments for an `$or` condition.\n\n```js\nmquery().or([{ color: 'red' }, { status: 'emergency' }])\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/or/)\n\n###polygon()\n\nSpecifies a `$polygon` condition\n\n```js\nmquery().where('loc').within().polygon([10,20], [13, 25], [7,15])\nmquery().polygon('loc', [10,20], [13, 25], [7,15])\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/polygon/)\n\n###regex()\n\nSpecifies a `$regex` query condition.\n\n```js\nmquery().where('name').regex(/^sixstepsrecords/)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/regex/)\n\n###select()\n\nSpecifies which document fields to include or exclude\n\n```js\n// 1 means include, 0 means exclude\nmquery().select({ name: 1, address: 1, _id: 0 })\n\n// or\n\nmquery().select('name address -_id')\n```\n\n#####String syntax\n\nWhen passing a string, prefixing a path with `-` will flag that path as excluded. 
When a path does not have the `-` prefix, it is included.\n\n```js\n// include a and b, exclude c\nquery.select('a b -c');\n\n// or you may use object notation, useful when\n// you have keys already prefixed with a \"-\"\nquery.select({a: 1, b: 1, c: 0});\n```\n\n_Cannot be used with `distinct()`._\n\n###selected()\n\nDetermines if the query has selected any fields.\n\n```js\nvar query = mquery();\nquery.selected() // false\nquery.select('-name');\nquery.selected() // true\n```\n\n###selectedInclusively()\n\nDetermines if the query has selected any fields inclusively.\n\n```js\nvar query = mquery().select('name');\nquery.selectedInclusively() // true\n\nvar query = mquery();\nquery.selected() // false\nquery.select('-name');\nquery.selectedInclusively() // false\nquery.selectedExclusively() // true\n```\n\n###selectedExclusively()\n\nDetermines if the query has selected any fields exclusively.\n\n```js\nvar query = mquery().select('-name');\nquery.selectedExclusively() // true\n\nvar query = mquery();\nquery.selected() // false\nquery.select('name');\nquery.selectedExclusively() // false\nquery.selectedInclusively() // true\n```\n\n###size()\n\nSpecifies a `$size` query condition.\n\n```js\nmquery().where('someArray').size(6)\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/size/)\n\n###slice()\n\nSpecifies a `$slice` projection for a `path`\n\n```js\nmquery().where('comments').slice(5)\nmquery().where('comments').slice(-5)\nmquery().where('comments').slice([-10, 5])\n```\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/projection/slice/)\n\n###within()\n\nSets a `$geoWithin` or `$within` argument for geo-spatial queries.\n\n```js\nmquery().within().box()\nmquery().within().circle()\nmquery().within().geometry()\n\nmquery().where('loc').within({ center: [50,50], radius: 10, unique: true, spherical: true });\nmquery().where('loc').within({ box: [[40.73, -73.9], [40.7, -73.988]] });\nmquery().where('loc').within({ polygon: [[],[],[],[]] });\n\nmquery().where('loc').within([], [], []) // polygon\nmquery().where('loc').within([], []) // box\nmquery().where('loc').within({ type: 'LineString', coordinates: [...] }); // geometry\n```\n\nAs of mquery 2.0, `$geoWithin` is used by default. This impacts you if running MongoDB < 2.4. To alter this behavior, see [mquery.use$geoWithin](#mqueryusegeowithin).\n\n**Must** be used after `where()`.\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/reference/operator/geoWithin/)\n\n###where()\n\nSpecifies a `path` for use with chaining\n\n```js\n// instead of writing:\nmquery().find({age: {$gte: 21, $lte: 65}});\n\n// we can instead write:\nmquery().where('age').gte(21).lte(65);\n\n// passing query conditions is permitted too\nmquery().find().where({ name: 'vonderful' })\n\n// chaining\nmquery()\n.where('age').gte(21).lte(65)\n.where({ 'name': /^vonderful/i })\n.where('friends').slice(10)\n.exec(callback)\n```\n\n###$where()\n\nSpecifies a `$where` condition.\n\nUse `$where` when you need to select documents using a JavaScript expression.\n\n```js\nquery.$where('this.comments.length > 10 || this.name.length > 5').exec(callback)\n\nquery.$where(function () {\n return this.comments.length > 10 || this.name.length > 5;\n})\n```\n\nOnly use `$where` when you have a condition that cannot be met using other MongoDB operators like `$lt`. 
Be sure to read about all of [its caveats](http://docs.mongodb.org/manual/reference/operator/where/) before using.\n\n-----------\n\n###batchSize()\n\nSpecifies the batchSize option.\n\n```js\nquery.batchSize(100)\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/method/cursor.batchSize/)\n\n###comment()\n\nSpecifies the comment option.\n\n```js\nquery.comment('login query');\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/)\n\n###hint()\n\nSets query hints.\n\n```js\nmquery().hint({ indexA: 1, indexB: -1 })\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/hint/)\n\n###limit()\n\nSpecifies the limit option.\n\n```js\nquery.limit(20)\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/method/cursor.limit/)\n\n###maxScan()\n\nSpecifies the maxScan option.\n\n```js\nquery.maxScan(100)\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/maxScan/)\n\n###skip()\n\nSpecifies the skip option.\n\n```js\nquery.skip(100).limit(20)\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/method/cursor.skip/)\n\n###sort()\n\nSets the query sort order.\n\nIf an object is passed, key values allowed are `asc`, `desc`, `ascending`, `descending`, `1`, and `-1`.\n\nIf a string is passed, it must be a space delimited list of path names. The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending.\n\n```js\n// these are equivalent\nquery.sort({ field: 'asc', test: -1 });\nquery.sort('field -test');\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/method/cursor.sort/)\n\n###read()\n\nSets the readPreference option for the query.\n\n```js\nmquery().read('primary')\nmquery().read('p') // same as primary\n\nmquery().read('primaryPreferred')\nmquery().read('pp') // same as primaryPreferred\n\nmquery().read('secondary')\nmquery().read('s') // same as secondary\n\nmquery().read('secondaryPreferred')\nmquery().read('sp') // same as secondaryPreferred\n\nmquery().read('nearest')\nmquery().read('n') // same as nearest\n\n// specifying tags\nmquery().read('s', [{ dc:'sf', s: 1 },{ dc:'ma', s: 2 }])\n```\n\n#####Preferences:\n\n- `primary` - (default) Read from primary only. Operations will produce an error if primary is unavailable. Cannot be combined with tags.\n- `secondary` - Read from secondary if available, otherwise error.\n- `primaryPreferred` - Read from primary if available, otherwise a secondary.\n- `secondaryPreferred` - Read from a secondary if available, otherwise read from the primary.\n- `nearest` - All operations read from among the nearest candidates, but unlike other modes, this option will include both the primary and all secondaries in the random selection.\n\nAliases\n\n- `p` primary\n- `pp` primaryPreferred\n- `s` secondary\n- `sp` secondaryPreferred\n- `n` nearest\n\nRead more about how to use read preferrences [here](http://docs.mongodb.org/manual/applications/replication/#read-preference) and [here](http://mongodb.github.com/node-mongodb-native/driver-articles/anintroductionto1_1and2_2.html#read-preferences).\n\n###slaveOk()\n\nSets the slaveOk option. 
`true` allows reading from secondaries.\n\n**deprecated** use [read()](#read) preferences instead if on mongodb >= 2.2\n\n```js\nquery.slaveOk() // true\nquery.slaveOk(true)\nquery.slaveOk(false)\n```\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/method/rs.slaveOk/)\n\n###snapshot()\n\nSpecifies this query as a snapshot query.\n\n```js\nmquery().snapshot() // true\nmquery().snapshot(true)\nmquery().snapshot(false)\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB documentation](http://docs.mongodb.org/manual/reference/operator/snapshot/)\n\n###tailable()\n\nSets tailable option.\n\n```js\nmquery().tailable() <== true\nmquery().tailable(true)\nmquery().tailable(false)\n```\n\n_Cannot be used with `distinct()`._\n\n[MongoDB Documentation](http://docs.mongodb.org/manual/tutorial/create-tailable-cursor/)\n\n##Helpers\n\n###collection()\n\nSets the querys collection.\n\n```js\nmquery().collection(aCollection)\n```\n\n\n###merge(object)\n\nMerges other mquery or match condition objects into this one. When an muery instance is passed, its match conditions, field selection and options are merged.\n\n```js\nvar drum = mquery({ type: 'drum' }).collection(instruments);\nvar redDrum = mqery({ color: 'red' }).merge(drum);\nredDrum.count(function (err, n) {\n console.log('there are %d red drums', n);\n})\n```\n\nInternally uses `mquery.canMerge` to determine validity.\n\n###setOptions(options)\n\nSets query options.\n\n```js\nmquery().setOptions({ collection: coll, limit: 20 })\n```\n\n#####options\n\n- [tailable](#tailable) *\n- [sort](#sort) *\n- [limit](#limit) *\n- [skip](#skip) *\n- [maxScan](#maxScan) *\n- [batchSize](#batchSize) *\n- [comment](#comment) *\n- [snapshot](#snapshot) *\n- [hint](#hint) *\n- [slaveOk](#slaveOk) *\n- [safe](http://docs.mongodb.org/manual/reference/write-concern/): Boolean - passed through to the collection. Setting to `true` is equivalent to `{ w: 1 }`\n- [collection](#collection): the collection to query against\n\n_* denotes a query helper method is also available_\n\n###mquery.canMerge(conditions)\n\nDetermines if `conditions` can be merged using `mquery().merge()`.\n\n```js\nvar query = mquery({ type: 'drum' });\nvar okToMerge = mquery.canMerge(anObject)\nif (okToMerge) {\n query.merge(anObject);\n}\n```\n\n##mquery.use$geoWithin\n\nMongoDB 2.4 introduced the `$geoWithin` operator which replaces and is 100% backward compatible with `$within`. As of mquery 0.2, we default to using `$geoWithin` for all `within()` calls.\n\nIf you are running MongoDB < 2.4 this will be problematic. To force `mquery` to be backward compatible and always use `$within`, set the `mquery.use$geoWithin` flag to `false`.\n\n```js\nmquery.use$geoWithin = false;\n```\n\n##Custom Base Queries\n\nOften times we want custom base queries that encapsulate predefined criteria. With `mquery` this is easy. First create the query you want to reuse and call its `toConstructor()` method which returns a new subclass of `mquery` that retains all options and criteria of the original.\n\n```js\nvar greatMovies = mquery(movieCollection).where('rating').gte(4.5).toConstructor();\n\n// use it!\ngreatMovies().count(function (err, n) {\n console.log('There are %d great movies', n);\n});\n\ngreatMovies().where({ name: /^Life/ }).select('name').find(function (err, docs) {\n console.log(docs);\n});\n```\n\n##Validation\n\nMethod and options combinations are checked for validity at runtime to prevent creation of invalid query constructs. 
For example, a `distinct` query does not support specifying options like `hint` or field selection. In this case an error will be thrown so you can catch these mistakes in development.\n\n##Debug support\n\nDebug mode is provided through the use of the [debug](https://github.com/visionmedia/debug) module. To enable:\n\n DEBUG=mquery node yourprogram.js\n\nRead the debug module documentation for more details.\n\n##Future goals\n\n - mongo shell compatibility\n - browser compatibility\n - mongoose compatibility\n\n## Installation\n\n $ npm install mquery\n\n## License\n\n[MIT](https://github.com/aheckmann/mquery/blob/master/LICENSE)\n\n", - "readmeFilename": "README.md", - "_id": "mquery@0.4.1", - "dist": { - "shasum": "1488e7f806df9bce9536756ec9543e7afa060fee" - }, - "_from": "mquery@0.4.1", - "_resolved": "https://registry.npmjs.org/mquery/-/mquery-0.4.1.tgz" -} diff --git a/node_modules/mongoose/node_modules/ms/.npmignore b/node_modules/mongoose/node_modules/ms/.npmignore deleted file mode 100644 index 3c3629e64..000000000 --- a/node_modules/mongoose/node_modules/ms/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/mongoose/node_modules/ms/package.json b/node_modules/mongoose/node_modules/ms/package.json deleted file mode 100644 index fef38c189..000000000 --- a/node_modules/mongoose/node_modules/ms/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "ms", - "version": "0.1.0", - "description": "Tiny ms conversion utility", - "main": "./ms", - "devDependencies": { - "mocha": "*", - "expect.js": "*", - "serve": "*" - }, - "readme": "\n# ms.js\n\nEver find yourself doing math in your head or writing `1000 * 60 * 60 …`?\nDon't want to add obstrusive `Number` prototype extensions to your reusable\n/ distributable modules and projects?\n\n`ms` is a tiny utility that you can leverage when your application needs to\naccept a number of miliseconds as a parameter.\n\nIf a number is supplied to `ms`, it returns it immediately (e.g:\nIf a string that contains the number is supplied, it returns it immediately as\na number (e.g: it returns `100` for `'100'`).\n\nHowever, if you pass a string with a number and a valid unit, hte number of\nequivalent ms is returned.\n\n```js\nms('1d') // 86400000\nms('10h') // 36000000\nms('2h') // 7200000\nms('1m') // 60000\nms('5ms') // 5000\nms('100') // '100'\nms(100) // 100\n```\n\n## How to use\n\n### Node\n\n```js\nrequire('ms')\n```\n\n### Browser\n\n```html\n\n```\n\n## Credits\n\n(The MIT License)\n\nCopyright (c) 2011 Guillermo Rauch <guillermo@learnboost.com>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH 
THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "README.md", - "_id": "ms@0.1.0", - "dist": { - "shasum": "a9cafc1b4536e693677598b9b375690ab208fab8" - }, - "_from": "ms@0.1.0", - "_resolved": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz" -} diff --git a/node_modules/mongoose/node_modules/muri/package.json b/node_modules/mongoose/node_modules/muri/package.json deleted file mode 100644 index 072ecc072..000000000 --- a/node_modules/mongoose/node_modules/muri/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "muri", - "version": "0.3.1", - "description": "MongoDB URI parser", - "main": "index.js", - "scripts": { - "test": "make test" - }, - "repository": { - "type": "git", - "url": "git://github.com/aheckmann/muri.git" - }, - "keywords": [ - "mongodb", - "uri", - "parser" - ], - "author": { - "name": "Aaron Heckmann", - "email": "aaron.heckmann+github@gmail.com" - }, - "license": "MIT", - "devDependencies": { - "mocha": "1.6.0" - }, - "readme": "#Meet Muri!\n\nMuri is your friendly neighborhood [MongoDB URI](http://www.mongodb.org/display/DOCS/Connections) parser for Node.js.\n\n\n###Install\n\n $ npm install muri\n\n###Use\n\n```js\n var muri = require('muri');\n var o = muri('mongodb://user:pass@local,remote:27018,japan:27019/neatdb?replicaSet=myreplset&journal=true&w=2&wtimeoutMS=50');\n\n console.log(o);\n\n { hosts: [ { host: 'local', port: 27017 },\n { host: 'remote', port: 27018 },\n { host: 'japan', port: 27019 } ],\n db: 'neatdb',\n options: {\n replicaSet: 'myreplset',\n journal: true,\n w: 2,\n wtimeoutMS: 50\n },\n auth: {\n user: 'user',\n pass: 'pass'\n }\n }\n```\n\n### Details\n\nThe returned object contains the following properties:\n\n- db: the name of the database. defaults to \"admin\" if not specified\n- auth: if auth is specified, this object will exist `{ user: 'username', pass: 'password' }`\n- hosts: array of host/port objects, one for each specified `[{ host: 'local', port: 27107 }, { host: '..', port: port }]`\n - if a port is not specified for a given host, the default port (27017) is used\n - if a unix domain socket is passed, host/port will be undefined and `ipc` will be set to the value specified `[{ ipc: '/tmp/mongodb-27017' }]`\n- options: this is a hash of all options specified in the querystring\n\n[LICENSE](https://github.com/aheckmann/muri/blob/master/LICENSE)\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/aheckmann/muri/issues" - }, - "homepage": "https://github.com/aheckmann/muri", - "_id": "muri@0.3.1", - "dist": { - "shasum": "b2d844c72f626171850616c73c9426b3b0b9a9ab" - }, - "_from": "muri@0.3.1", - "_resolved": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz" -} diff --git a/node_modules/mongoose/node_modules/regexp-clone/package.json b/node_modules/mongoose/node_modules/regexp-clone/package.json deleted file mode 100644 index 312b6c25c..000000000 --- a/node_modules/mongoose/node_modules/regexp-clone/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "regexp-clone", - "version": "0.0.1", - "description": "Clone RegExps with options", - "main": "index.js", - "scripts": { - "test": "make test" - }, - "repository": { - "type": "git", - "url": "git://github.com/aheckmann/regexp-clone.git" - }, - "keywords": [ - "RegExp", - "clone" - ], - "author": { - "name": "Aaron Heckmann", - "email": "aaron.heckmann+github@gmail.com" - }, - "license": "MIT", - "devDependencies": { - "mocha": "1.8.1" - }, - "readme": "#regexp-clone\n==============\n\nClones RegExps with flag 
preservation\n\n```js\nvar regexpClone = require('regexp-clone');\n\nvar a = /somethin/g;\nconsole.log(a.global); // true\n\nvar b = regexpClone(a);\nconsole.log(b.global); // true\n```\n\n## License\n\n[MIT](https://github.com/aheckmann/regexp-clone/blob/master/LICENSE)\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/aheckmann/regexp-clone/issues" - }, - "homepage": "https://github.com/aheckmann/regexp-clone", - "_id": "regexp-clone@0.0.1", - "dist": { - "shasum": "b23928113b84b20e2e5f61eade4a3e54c897104b" - }, - "_from": "regexp-clone@0.0.1", - "_resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" -} diff --git a/node_modules/mongoose/node_modules/sliced/package.json b/node_modules/mongoose/node_modules/sliced/package.json deleted file mode 100644 index 3e7cb48ef..000000000 --- a/node_modules/mongoose/node_modules/sliced/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "sliced", - "version": "0.0.5", - "description": "A faster Node.js alternative to Array.prototype.slice.call(arguments)", - "main": "index.js", - "scripts": { - "test": "make test" - }, - "repository": { - "type": "git", - "url": "git://github.com/aheckmann/sliced" - }, - "keywords": [ - "arguments", - "slice", - "array" - ], - "author": { - "name": "Aaron Heckmann", - "email": "aaron.heckmann+github@gmail.com" - }, - "license": "MIT", - "devDependencies": { - "mocha": "1.5.0", - "benchmark": "~1.0.0" - }, - "readme": "#sliced\n==========\n\nA faster alternative to `[].slice.call(arguments)`.\n\n[![Build Status](https://secure.travis-ci.org/aheckmann/sliced.png)](http://travis-ci.org/aheckmann/sliced)\n\nExample output from [benchmark.js](https://github.com/bestiejs/benchmark.js)\n\n Array.prototype.slice.call x 1,320,205 ops/sec ±2.35% (92 runs sampled)\n [].slice.call x 1,314,605 ops/sec ±1.60% (95 runs sampled)\n cached slice.call x 10,468,380 ops/sec ±1.45% (95 runs sampled)\n sliced x 16,608,237 ops/sec ±1.40% (92 runs sampled)\n fastest is sliced\n\n Array.prototype.slice.call(arguments, 1) x 1,383,584 ops/sec ±1.73% (97 runs sampled)\n [].slice.call(arguments, 1) x 1,494,735 ops/sec ±1.33% (95 runs sampled)\n cached slice.call(arguments, 1) x 10,085,270 ops/sec ±1.51% (97 runs sampled)\n sliced(arguments, 1) x 16,620,480 ops/sec ±1.29% (95 runs sampled)\n fastest is sliced(arguments, 1)\n\n Array.prototype.slice.call(arguments, -1) x 1,303,262 ops/sec ±1.62% (94 runs sampled)\n [].slice.call(arguments, -1) x 1,325,615 ops/sec ±1.36% (97 runs sampled)\n cached slice.call(arguments, -1) x 9,673,603 ops/sec ±1.70% (96 runs sampled)\n sliced(arguments, -1) x 16,384,575 ops/sec ±1.06% (91 runs sampled)\n fastest is sliced(arguments, -1)\n\n Array.prototype.slice.call(arguments, -2, -10) x 1,404,390 ops/sec ±1.61% (95 runs sampled)\n [].slice.call(arguments, -2, -10) x 1,514,367 ops/sec ±1.21% (96 runs sampled)\n cached slice.call(arguments, -2, -10) x 9,836,017 ops/sec ±1.21% (95 runs sampled)\n sliced(arguments, -2, -10) x 18,544,882 ops/sec ±1.30% (91 runs sampled)\n fastest is sliced(arguments, -2, -10)\n\n Array.prototype.slice.call(arguments, -2, -1) x 1,458,604 ops/sec ±1.41% (97 runs sampled)\n [].slice.call(arguments, -2, -1) x 1,536,547 ops/sec ±1.63% (99 runs sampled)\n cached slice.call(arguments, -2, -1) x 10,060,633 ops/sec ±1.37% (96 runs sampled)\n sliced(arguments, -2, -1) x 18,608,712 ops/sec ±1.08% (93 runs sampled)\n fastest is sliced(arguments, -2, -1)\n\n_Benchmark 
[source](https://github.com/aheckmann/sliced/blob/master/bench.js)._\n\n##Usage\n\n`sliced` accepts the same arguments as `Array#slice` so you can easily swap it out.\n\n```js\nfunction zing () {\n var slow = [].slice.call(arguments, 1, 8);\n var args = slice(arguments, 1, 8);\n\n var slow = Array.prototype.slice.call(arguments);\n var args = slice(arguments);\n // etc\n}\n```\n\n## install\n\n npm install sliced\n\n\n[LICENSE](https://github.com/aheckmann/sliced/blob/master/LICENSE)\n", - "readmeFilename": "README.md", - "bugs": { - "url": "https://github.com/aheckmann/sliced/issues" - }, - "homepage": "https://github.com/aheckmann/sliced", - "_id": "sliced@0.0.5", - "dist": { - "shasum": "67055cdf2761a8f68bd98894469179e99bbd83e0" - }, - "_from": "sliced@0.0.5", - "_resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" -} diff --git a/node_modules/mongoose/package.json b/node_modules/mongoose/package.json index 1a14cdbf7..33a421976 100644 --- a/node_modules/mongoose/package.json +++ b/node_modules/mongoose/package.json @@ -1,70 +1,91 @@ { - "name": "mongoose", - "description": "Mongoose MongoDB ODM", - "version": "3.8.4", + "_from": "mongoose@3.8.4", + "_id": "mongoose@3.8.4", + "_inBundle": false, + "_integrity": "sha1-etSJRNFiE+sWwU7F+9Jc+Q2MWGw=", + "_location": "/mongoose", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "mongoose@3.8.4", + "name": "mongoose", + "escapedName": "mongoose", + "rawSpec": "3.8.4", + "saveSpec": null, + "fetchSpec": "3.8.4" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/mongoose/-/mongoose-3.8.4.tgz", + "_shasum": "7ad48944d16213eb16c14ec5fbd25cf90d8c586c", + "_spec": "mongoose@3.8.4", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton", "author": { "name": "Guillermo Rauch", "email": "guillermo@learnboost.com" }, - "keywords": [ - "mongodb", - "document", - "model", - "schema", - "database", - "odm", - "data", - "datastore", - "query", - "nosql", - "orm", - "db" - ], + "bugs": { + "url": "https://github.com/learnboost/mongoose/issues/new", + "email": "mongoose-orm@googlegroups.com" + }, + "bundleDependencies": false, "dependencies": { "hooks": "0.2.1", "mongodb": "1.3.23", + "mpath": "0.1.1", + "mpromise": "0.4.3", + "mquery": "0.4.1", "ms": "0.1.0", - "sliced": "0.0.5", "muri": "0.3.1", - "mpromise": "0.4.3", - "mpath": "0.1.1", "regexp-clone": "0.0.1", - "mquery": "0.4.1" + "sliced": "0.0.5" }, + "deprecated": false, + "description": "Mongoose MongoDB ODM", "devDependencies": { - "mocha": "1.12.0", - "node-static": "0.5.9", + "async": "0.2.5", + "benchmark": "1.0.0", "dox": "0.3.1", - "jade": "0.26.3", "highlight.js": "7.0.1", + "jade": "0.26.3", "markdown": "0.3.1", - "promises-aplus-tests": ">= 1.0.2", - "tbd": "0.6.4", - "benchmark": "1.0.0", + "mocha": "1.12.0", + "node-static": "0.5.9", "open": "0.0.3", - "async": "0.2.5" + "promises-aplus-tests": ">= 1.0.2", + "tbd": "0.6.4" }, "directories": { "lib": "./lib/mongoose" }, - "scripts": { - "test": "make test" - }, - "main": "./index.js", "engines": { "node": ">=0.6.19" }, - "bugs": { - "url": "https://github.com/learnboost/mongoose/issues/new", - "email": "mongoose-orm@googlegroups.com" - }, + "homepage": "http://mongoosejs.com", + "keywords": [ + "mongodb", + "document", + "model", + "schema", + "database", + "odm", + "data", + "datastore", + "query", + "nosql", + "orm", + "db" + ], + "main": "./index.js", + "name": "mongoose", "repository": { "type": "git", "url": 
"git://github.com/LearnBoost/mongoose.git" }, - "homepage": "http://mongoosejs.com", - "readme": "# Mongoose\n\nMongoose is a [MongoDB](http://www.mongodb.org/) object modeling tool designed to work in an asynchronous environment.\n\n[![Build Status](https://travis-ci.org/LearnBoost/mongoose.png?branch=3.8.x)](https://travis-ci.org/LearnBoost/mongoose)\n\n## Documentation\n\n[mongoosejs.com](http://mongoosejs.com/)\n\n## Support\n\n - [Stack Overflow](http://stackoverflow.com/questions/tagged/mongoose)\n - [bug reports](https://github.com/learnboost/mongoose/issues/)\n - [help forum](http://groups.google.com/group/mongoose-orm)\n - [MongoDB support](http://www.mongodb.org/display/DOCS/Technical+Support)\n - (irc) #mongoosejs on freenode\n\n## Plugins\n\nCheck out the [plugins search site](http://plugins.mongoosejs.com/) to see hundreds of related modules from the community.\n\n## Contributors\n\nView all 100+ [contributors](https://github.com/learnboost/mongoose/graphs/contributors). Stand up and be counted as a [contributor](https://github.com/LearnBoost/mongoose/blob/master/CONTRIBUTING.md) too!\n\n## Live Examples\n\n\n## Installation\n\nFirst install [node.js](http://nodejs.org/) and [mongodb](http://www.mongodb.org/downloads). Then:\n\n $ npm install mongoose\n \n## Stability\n\nThe current stable branch is [3.8.x](https://github.com/LearnBoost/mongoose/tree/3.8.x). New (unstable) development always occurs on the [master](https://github.com/LearnBoost/mongoose/tree/master) branch.\n\n## Overview\n\n### Connecting to MongoDB\n\nFirst, we need to define a connection. If your app uses only one database, you should use `mongoose.connect`. If you need to create additional connections, use `mongoose.createConnection`.\n\nBoth `connect` and `createConnection` take a `mongodb://` URI, or the parameters `host, database, port, options`.\n\n var mongoose = require('mongoose');\n\n mongoose.connect('mongodb://localhost/my_database');\n\nOnce connected, the `open` event is fired on the `Connection` instance. If you're using `mongoose.connect`, the `Connection` is `mongoose.connection`. Otherwise, `mongoose.createConnection` return value is a `Connection`.\n\n**Important!** Mongoose buffers all the commands until it's connected to the database. This means that you don't have to wait until it connects to MongoDB in order to define models, run queries, etc.\n\n### Defining a Model\n\nModels are defined through the `Schema` interface. 
\n\n var Schema = mongoose.Schema\n , ObjectId = Schema.ObjectId;\n\n var BlogPost = new Schema({\n author : ObjectId\n , title : String\n , body : String\n , date : Date\n });\n\nAside from defining the structure of your documents and the types of data you're storing, a Schema handles the definition of:\n\n* [Validators](http://mongoosejs.com/docs/validation.html) (async and sync)\n* [Defaults](http://mongoosejs.com/docs/api.html#schematype_SchemaType-default)\n* [Getters](http://mongoosejs.com/docs/api.html#schematype_SchemaType-get)\n* [Setters](http://mongoosejs.com/docs/api.html#schematype_SchemaType-set)\n* [Indexes](http://mongoosejs.com/docs/guide.html#indexes)\n* [Middleware](http://mongoosejs.com/docs/middleware.html)\n* [Methods](http://mongoosejs.com/docs/guide.html#methods) definition\n* [Statics](http://mongoosejs.com/docs/guide.html#statics) definition\n* [Plugins](http://mongoosejs.com/docs/plugins.html)\n* [pseudo-JOINs](http://mongoosejs.com/docs/populate.html)\n\nThe following example shows some of these features:\n\n var Comment = new Schema({\n name : { type: String, default: 'hahaha' }\n , age : { type: Number, min: 18, index: true }\n , bio : { type: String, match: /[a-z]/ }\n , date : { type: Date, default: Date.now }\n , buff : Buffer\n });\n\n // a setter\n Comment.path('name').set(function (v) {\n return capitalize(v);\n });\n\n // middleware\n Comment.pre('save', function (next) {\n notify(this.get('email'));\n next();\n });\n\nTake a look at the example in `examples/schema.js` for an end-to-end example of a typical setup.\n\n### Accessing a Model\n\nOnce we define a model through `mongoose.model('ModelName', mySchema)`, we can access it through the same function\n\n var myModel = mongoose.model('ModelName');\n\nOr just do it all at once\n\n var MyModel = mongoose.model('ModelName', mySchema);\n\nWe can then instantiate it, and save it:\n\n var instance = new MyModel();\n instance.my.key = 'hello';\n instance.save(function (err) {\n //\n });\n\nOr we can find documents from the same collection\n\n MyModel.find({}, function (err, docs) {\n // docs.forEach\n });\n\nYou can also `findOne`, `findById`, `update`, etc. For more details check out [the docs](http://mongoosejs.com/docs/queries.html).\n\n**Important!** If you opened a separate connection using `mongoose.createConnection()` but attempt to access the model through `mongoose.model('ModelName')` it will not work as expected since it is not hooked up to an active db connection. In this case access your model through the connection you created:\n\n var conn = mongoose.createConnection('your connection string');\n var MyModel = conn.model('ModelName', schema);\n var m = new MyModel;\n m.save() // works\n\n vs\n\n var conn = mongoose.createConnection('your connection string');\n var MyModel = mongoose.model('ModelName', schema);\n var m = new MyModel;\n m.save() // does not work b/c the default connection object was never connected\n\n### Embedded Documents\n\nIn the first example snippet, we defined a key in the Schema that looks like:\n\n comments: [Comments]\n\nWhere `Comments` is a `Schema` we created. 
This means that creating embedded documents is as simple as:\n\n // retrieve my model\n var BlogPost = mongoose.model('BlogPost');\n\n // create a blog post\n var post = new BlogPost();\n\n // create a comment\n post.comments.push({ title: 'My comment' });\n\n post.save(function (err) {\n if (!err) console.log('Success!');\n });\n\nThe same goes for removing them:\n\n BlogPost.findById(myId, function (err, post) {\n if (!err) {\n post.comments[0].remove();\n post.save(function (err) {\n // do something\n });\n }\n });\n\nEmbedded documents enjoy all the same features as your models. Defaults, validators, middleware. Whenever an error occurs, it's bubbled to the `save()` error callback, so error handling is a snap!\n\nMongoose interacts with your embedded documents in arrays _atomically_, out of the box.\n\n### Middleware\n\nSee the [docs](http://mongoosejs.com/docs/middleware.html) page.\n\n#### Intercepting and mutating method arguments\n\nYou can intercept method arguments via middleware.\n\nFor example, this would allow you to broadcast changes about your Documents every time someone `set`s a path in your Document to a new value:\n\n schema.pre('set', function (next, path, val, typel) {\n // `this` is the current Document\n this.emit('set', path, val);\n\n // Pass control to the next pre\n next();\n });\n\nMoreover, you can mutate the incoming `method` arguments so that subsequent middleware see different values for those arguments. To do so, just pass the new values to `next`:\n\n .pre(method, function firstPre (next, methodArg1, methodArg2) {\n // Mutate methodArg1\n next(\"altered-\" + methodArg1.toString(), methodArg2);\n })\n\n // pre declaration is chainable\n .pre(method, function secondPre (next, methodArg1, methodArg2) {\n console.log(methodArg1);\n // => 'altered-originalValOfMethodArg1' \n \n console.log(methodArg2);\n // => 'originalValOfMethodArg2' \n \n // Passing no arguments to `next` automatically passes along the current argument values\n // i.e., the following `next()` is equivalent to `next(methodArg1, methodArg2)`\n // and also equivalent to, with the example method arg \n // values, `next('altered-originalValOfMethodArg1', 'originalValOfMethodArg2')`\n next();\n })\n\n#### Schema gotcha\n\n`type`, when used in a schema has special meaning within Mongoose. If your schema requires using `type` as a nested property you must use object notation:\n\n new Schema({\n broken: { type: Boolean }\n , asset : {\n name: String\n , type: String // uh oh, it broke. asset will be interpreted as String\n }\n });\n\n new Schema({\n works: { type: Boolean }\n , asset : {\n name: String\n , type: { type: String } // works. asset is an object with a type property\n }\n });\n\n### Driver access\n\nThe driver being used defaults to [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) and is directly accessible through `YourModel.collection`. 
**Note**: using the driver directly bypasses all Mongoose power-tools like validation, getters, setters, hooks, etc.\n\n## API Docs\n\nFind the API docs [here](http://mongoosejs.com/docs/api.html), generated using [dox](http://github.com/visionmedia/dox).\n\n## License\n\nCopyright (c) 2010 LearnBoost <dev@learnboost.com>\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", - "readmeFilename": "README.md", - "_id": "mongoose@3.8.4", - "_from": "mongoose@*" + "scripts": { + "test": "make test" + }, + "version": "3.8.4" } diff --git a/node_modules/mongoose/node_modules/mpath/.npmignore b/node_modules/mpath/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/mpath/.npmignore rename to node_modules/mpath/.npmignore diff --git a/node_modules/mongoose/node_modules/mpath/.travis.yml b/node_modules/mpath/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/mpath/.travis.yml rename to node_modules/mpath/.travis.yml diff --git a/node_modules/mongoose/node_modules/mpath/History.md b/node_modules/mpath/History.md similarity index 100% rename from node_modules/mongoose/node_modules/mpath/History.md rename to node_modules/mpath/History.md diff --git a/node_modules/mongoose/node_modules/mpath/LICENSE b/node_modules/mpath/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/mpath/LICENSE rename to node_modules/mpath/LICENSE diff --git a/node_modules/mongoose/node_modules/mpath/Makefile b/node_modules/mpath/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/mpath/Makefile rename to node_modules/mpath/Makefile diff --git a/node_modules/mongoose/node_modules/mpath/README.md b/node_modules/mpath/README.md similarity index 100% rename from node_modules/mongoose/node_modules/mpath/README.md rename to node_modules/mpath/README.md diff --git a/node_modules/mongoose/node_modules/mpath/index.js b/node_modules/mpath/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mpath/index.js rename to node_modules/mpath/index.js diff --git a/node_modules/mongoose/node_modules/mpath/lib/index.js b/node_modules/mpath/lib/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mpath/lib/index.js rename to node_modules/mpath/lib/index.js diff --git a/node_modules/mpath/package.json b/node_modules/mpath/package.json new file mode 100644 index 000000000..0cd912951 --- /dev/null +++ b/node_modules/mpath/package.json @@ -0,0 +1,56 @@ +{ + "_from": "mpath@0.1.1", + "_id": "mpath@0.1.1", + "_inBundle": false, 
+ "_integrity": "sha1-I9qFK3wjLuCX9HWdKcDunNItXkY=", + "_location": "/mpath", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "mpath@0.1.1", + "name": "mpath", + "escapedName": "mpath", + "rawSpec": "0.1.1", + "saveSpec": null, + "fetchSpec": "0.1.1" + }, + "_requiredBy": [ + "/mongoose" + ], + "_resolved": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz", + "_shasum": "23da852b7c232ee097f4759d29c0ee9cd22d5e46", + "_spec": "mpath@0.1.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Aaron Heckmann", + "email": "aaron.heckmann+github@gmail.com" + }, + "bugs": { + "url": "https://github.com/aheckmann/mpath/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "{G,S}et object values using MongoDB path notation", + "devDependencies": { + "mocha": "1.6.0" + }, + "homepage": "https://github.com/aheckmann/mpath#readme", + "keywords": [ + "mongodb", + "path", + "get", + "set" + ], + "license": "MIT", + "main": "index.js", + "name": "mpath", + "repository": { + "type": "git", + "url": "git://github.com/aheckmann/mpath.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.1.1" +} diff --git a/node_modules/mongoose/node_modules/mpath/test/index.js b/node_modules/mpath/test/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mpath/test/index.js rename to node_modules/mpath/test/index.js diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/.name b/node_modules/mpromise/.idea/.name similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/.idea/.name rename to node_modules/mpromise/.idea/.name diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/codeStyleSettings.xml b/node_modules/mpromise/.idea/codeStyleSettings.xml similarity index 97% rename from node_modules/mongoose/node_modules/mpromise/.idea/codeStyleSettings.xml rename to node_modules/mpromise/.idea/codeStyleSettings.xml index b543f7797..ffb445785 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/codeStyleSettings.xml +++ b/node_modules/mpromise/.idea/codeStyleSettings.xml @@ -1,33 +1,33 @@ - - - - - - - + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/encodings.xml b/node_modules/mpromise/.idea/encodings.xml similarity index 97% rename from node_modules/mongoose/node_modules/mpromise/.idea/encodings.xml rename to node_modules/mpromise/.idea/encodings.xml index 7c62b52a1..e206d70d8 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/encodings.xml +++ b/node_modules/mpromise/.idea/encodings.xml @@ -1,5 +1,5 @@ - - - - - + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/inspectionProfiles/Project_Default.xml b/node_modules/mpromise/.idea/inspectionProfiles/Project_Default.xml similarity index 98% rename from node_modules/mongoose/node_modules/mpromise/.idea/inspectionProfiles/Project_Default.xml rename to node_modules/mpromise/.idea/inspectionProfiles/Project_Default.xml index f14638f03..9d51f7c1c 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/inspectionProfiles/Project_Default.xml +++ b/node_modules/mpromise/.idea/inspectionProfiles/Project_Default.xml @@ -1,8 +1,8 @@ - - - + + + \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/inspectionProfiles/profiles_settings.xml b/node_modules/mpromise/.idea/inspectionProfiles/profiles_settings.xml similarity index 97% rename from 
node_modules/mongoose/node_modules/mpromise/.idea/inspectionProfiles/profiles_settings.xml rename to node_modules/mpromise/.idea/inspectionProfiles/profiles_settings.xml index 6933c1eaf..3b312839b 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/inspectionProfiles/profiles_settings.xml +++ b/node_modules/mpromise/.idea/inspectionProfiles/profiles_settings.xml @@ -1,7 +1,7 @@ - - - + + + \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/jsLibraryMappings.xml b/node_modules/mpromise/.idea/jsLibraryMappings.xml similarity index 97% rename from node_modules/mongoose/node_modules/mpromise/.idea/jsLibraryMappings.xml rename to node_modules/mpromise/.idea/jsLibraryMappings.xml index 0a66fda70..dddfa3798 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/jsLibraryMappings.xml +++ b/node_modules/mpromise/.idea/jsLibraryMappings.xml @@ -1,10 +1,10 @@ - - - - - - - - - - + + + + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml b/node_modules/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml similarity index 96% rename from node_modules/mongoose/node_modules/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml rename to node_modules/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml index bd5563574..adedc9105 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml +++ b/node_modules/mpromise/.idea/libraries/Node_js_Dependencies_for_mpromise.xml @@ -1,13 +1,13 @@ - - - - - - - - - - - - + + + + + + + + + + + + \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/misc.xml b/node_modules/mpromise/.idea/misc.xml similarity index 96% rename from node_modules/mongoose/node_modules/mpromise/.idea/misc.xml rename to node_modules/mpromise/.idea/misc.xml index bad1ff1bc..80d3a3922 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/misc.xml +++ b/node_modules/mpromise/.idea/misc.xml @@ -1,8 +1,8 @@ - - - - - - - + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/modules.xml b/node_modules/mpromise/.idea/modules.xml similarity index 96% rename from node_modules/mongoose/node_modules/mpromise/.idea/modules.xml rename to node_modules/mpromise/.idea/modules.xml index ff5992df3..4a9773f70 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/modules.xml +++ b/node_modules/mpromise/.idea/modules.xml @@ -1,9 +1,9 @@ - - - - - - - - - + + + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/mpromise.iml b/node_modules/mpromise/.idea/mpromise.iml similarity index 97% rename from node_modules/mongoose/node_modules/mpromise/.idea/mpromise.iml rename to node_modules/mpromise/.idea/mpromise.iml index a076124fe..c6a5d573b 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/mpromise.iml +++ b/node_modules/mpromise/.idea/mpromise.iml @@ -1,12 +1,12 @@ - - - - - - - - - - - - + + + + + + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/other.xml b/node_modules/mpromise/.idea/other.xml similarity index 95% rename from node_modules/mongoose/node_modules/mpromise/.idea/other.xml rename to node_modules/mpromise/.idea/other.xml index 82f7abedc..6f249c9e0 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/other.xml +++ b/node_modules/mpromise/.idea/other.xml @@ -1,7 +1,7 @@ - - - - - - - + + + + + + + diff --git 
a/node_modules/mongoose/node_modules/mpromise/.idea/scopes/scope_settings.xml b/node_modules/mpromise/.idea/scopes/scope_settings.xml similarity index 97% rename from node_modules/mongoose/node_modules/mpromise/.idea/scopes/scope_settings.xml rename to node_modules/mpromise/.idea/scopes/scope_settings.xml index 0d5175ca0..922003b84 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/scopes/scope_settings.xml +++ b/node_modules/mpromise/.idea/scopes/scope_settings.xml @@ -1,5 +1,5 @@ - - - + + + \ No newline at end of file diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/vcs.xml b/node_modules/mpromise/.idea/vcs.xml similarity index 96% rename from node_modules/mongoose/node_modules/mpromise/.idea/vcs.xml rename to node_modules/mpromise/.idea/vcs.xml index ab55cf163..c80f2198b 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/vcs.xml +++ b/node_modules/mpromise/.idea/vcs.xml @@ -1,7 +1,7 @@ - - - - - - - + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.idea/workspace.xml b/node_modules/mpromise/.idea/workspace.xml similarity index 98% rename from node_modules/mongoose/node_modules/mpromise/.idea/workspace.xml rename to node_modules/mpromise/.idea/workspace.xml index 1335fab06..13d9d199d 100644 --- a/node_modules/mongoose/node_modules/mpromise/.idea/workspace.xml +++ b/node_modules/mpromise/.idea/workspace.xml @@ -1,626 +1,626 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - C:\empeeric\mpromise\node_modules\mocha - - - - - - - - - - - - JavaScript - - - JavaScript validity issuesJavaScript - - - - - FunctionWithInconsistentReturnsJS - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $PROJECT_DIR$ - - true - - BDD - - false - - - - - - - - $PROJECT_DIR$/ - $PROJECT_DIR$/test - true - - BDD - - false - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - file://$USER_HOME$/.WebStorm7/system/extLibs/nodejs-v0.8.26-src/core-modules-sources/lib/events.js - 54 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Source - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + C:\empeeric\mpromise\node_modules\mocha + + + + + + + + + + + + JavaScript + + + JavaScript validity issuesJavaScript + + + + + FunctionWithInconsistentReturnsJS + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + $PROJECT_DIR$ + + true + + BDD + + false + + + + + + + + $PROJECT_DIR$/ + $PROJECT_DIR$/test + true + + BDD + + false + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + file://$USER_HOME$/.WebStorm7/system/extLibs/nodejs-v0.8.26-src/core-modules-sources/lib/events.js + 54 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Source + + + + + + + + + + + + diff --git a/node_modules/mongoose/node_modules/mpromise/.npmignore b/node_modules/mpromise/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/.npmignore rename to node_modules/mpromise/.npmignore diff --git a/node_modules/mongoose/node_modules/mpromise/.travis.yml b/node_modules/mpromise/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/.travis.yml rename to node_modules/mpromise/.travis.yml diff --git a/node_modules/mongoose/node_modules/mpromise/History.md b/node_modules/mpromise/History.md similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/History.md rename to node_modules/mpromise/History.md diff --git a/node_modules/mongoose/node_modules/mpromise/LICENSE b/node_modules/mpromise/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/LICENSE rename to node_modules/mpromise/LICENSE diff --git a/node_modules/mongoose/node_modules/mpromise/README.md b/node_modules/mpromise/README.md similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/README.md rename to node_modules/mpromise/README.md diff --git a/node_modules/mongoose/node_modules/mpromise/index.js b/node_modules/mpromise/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/index.js rename to node_modules/mpromise/index.js diff --git a/node_modules/mongoose/node_modules/mpromise/lib/promise.js b/node_modules/mpromise/lib/promise.js similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/lib/promise.js rename to node_modules/mpromise/lib/promise.js diff --git a/node_modules/mpromise/package.json b/node_modules/mpromise/package.json new file mode 100644 index 000000000..5e18485b6 --- /dev/null +++ b/node_modules/mpromise/package.json @@ -0,0 +1,59 @@ +{ + "_from": "mpromise@0.4.3", + "_id": "mpromise@0.4.3", + "_inBundle": false, + "_integrity": "sha1-7cR6daKhd7DpOCc121Lb7DgIzDM=", + "_location": "/mpromise", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "mpromise@0.4.3", + "name": "mpromise", + "escapedName": "mpromise", + "rawSpec": "0.4.3", + "saveSpec": null, + "fetchSpec": "0.4.3" + }, + "_requiredBy": [ + "/mongoose" + ], + "_resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.4.3.tgz", + "_shasum": "edc47a75a2a177b0e9382735db52dbec3808cc33", + "_spec": "mpromise@0.4.3", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Aaron Heckmann", + "email": "aaron.heckmann+github@gmail.com" + }, + "bugs": { + "url": "https://github.com/aheckmann/mpromise/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Promises A+ conformant implementation", + "devDependencies": { + "longjohn": "~0.2.1", + "mocha": "~1.13.0", + "promises-aplus-tests": "~2.0.2" + }, + "homepage": "https://github.com/aheckmann/mpromise#readme", + 
"keywords": [ + "promise", + "mongoose", + "aplus", + "a+", + "plus" + ], + "license": "MIT", + "main": "index.js", + "name": "mpromise", + "repository": { + "type": "git", + "url": "git://github.com/aheckmann/mpromise.git" + }, + "scripts": { + "test": "node node_modules/mocha/bin/_mocha" + }, + "version": "0.4.3" +} diff --git a/node_modules/mongoose/node_modules/mpromise/test/promise.domain.test.js b/node_modules/mpromise/test/promise.domain.test.js similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/test/promise.domain.test.js rename to node_modules/mpromise/test/promise.domain.test.js diff --git a/node_modules/mongoose/node_modules/mpromise/test/promise.test.js b/node_modules/mpromise/test/promise.test.js similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/test/promise.test.js rename to node_modules/mpromise/test/promise.test.js diff --git a/node_modules/mongoose/node_modules/mpromise/test/promises.Aplus.js b/node_modules/mpromise/test/promises.Aplus.js similarity index 100% rename from node_modules/mongoose/node_modules/mpromise/test/promises.Aplus.js rename to node_modules/mpromise/test/promises.Aplus.js diff --git a/node_modules/mongoose/node_modules/mquery/.npmignore b/node_modules/mquery/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/mquery/.npmignore rename to node_modules/mquery/.npmignore diff --git a/node_modules/mongoose/node_modules/mquery/.travis.yml b/node_modules/mquery/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/mquery/.travis.yml rename to node_modules/mquery/.travis.yml diff --git a/node_modules/mongoose/node_modules/mquery/History.md b/node_modules/mquery/History.md similarity index 100% rename from node_modules/mongoose/node_modules/mquery/History.md rename to node_modules/mquery/History.md diff --git a/node_modules/mongoose/node_modules/mquery/LICENSE b/node_modules/mquery/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/mquery/LICENSE rename to node_modules/mquery/LICENSE diff --git a/node_modules/mongoose/node_modules/mquery/Makefile b/node_modules/mquery/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/mquery/Makefile rename to node_modules/mquery/Makefile diff --git a/node_modules/mongoose/node_modules/mquery/README.md b/node_modules/mquery/README.md similarity index 100% rename from node_modules/mongoose/node_modules/mquery/README.md rename to node_modules/mquery/README.md diff --git a/node_modules/mongoose/node_modules/mquery/index.js b/node_modules/mquery/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/index.js rename to node_modules/mquery/index.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/collection/collection.js b/node_modules/mquery/lib/collection/collection.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/collection/collection.js rename to node_modules/mquery/lib/collection/collection.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/collection/index.js b/node_modules/mquery/lib/collection/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/collection/index.js rename to node_modules/mquery/lib/collection/index.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/collection/node.js b/node_modules/mquery/lib/collection/node.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/collection/node.js rename to 
node_modules/mquery/lib/collection/node.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/env.js b/node_modules/mquery/lib/env.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/env.js rename to node_modules/mquery/lib/env.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/mquery.js b/node_modules/mquery/lib/mquery.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/mquery.js rename to node_modules/mquery/lib/mquery.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/permissions.js b/node_modules/mquery/lib/permissions.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/permissions.js rename to node_modules/mquery/lib/permissions.js diff --git a/node_modules/mongoose/node_modules/mquery/lib/utils.js b/node_modules/mquery/lib/utils.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/lib/utils.js rename to node_modules/mquery/lib/utils.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/.travis.yml b/node_modules/mquery/node_modules/bson/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/.travis.yml rename to node_modules/mquery/node_modules/bson/.travis.yml diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/Makefile b/node_modules/mquery/node_modules/bson/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/Makefile rename to node_modules/mquery/node_modules/bson/Makefile diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/README.md b/node_modules/mquery/node_modules/bson/README.md similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/README.md rename to node_modules/mquery/node_modules/bson/README.md diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/binding.gyp b/node_modules/mquery/node_modules/bson/binding.gyp similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/binding.gyp rename to node_modules/mquery/node_modules/bson/binding.gyp diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/browser_build/bson.js b/node_modules/mquery/node_modules/bson/browser_build/bson.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/browser_build/bson.js rename to node_modules/mquery/node_modules/bson/browser_build/bson.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/browser_build/package.json b/node_modules/mquery/node_modules/bson/browser_build/package.json similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/browser_build/package.json rename to node_modules/mquery/node_modules/bson/browser_build/package.json diff --git a/node_modules/mongodb/node_modules/bson/build/Makefile b/node_modules/mquery/node_modules/bson/build/Makefile similarity index 75% rename from node_modules/mongodb/node_modules/bson/build/Makefile rename to node_modules/mquery/node_modules/bson/build/Makefile index 12b35d60f..ef3ef8f15 100644 --- a/node_modules/mongodb/node_modules/bson/build/Makefile +++ 
b/node_modules/mquery/node_modules/bson/build/Makefile @@ -41,30 +41,22 @@ all_deps := CC.target ?= $(CC) -CFLAGS.target ?= $(CFLAGS) +CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) CXX.target ?= $(CXX) -CXXFLAGS.target ?= $(CXXFLAGS) +CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) LINK.target ?= $(LINK) LDFLAGS.target ?= $(LDFLAGS) AR.target ?= $(AR) # C++ apps need to be linked with g++. -# -# Note: flock is used to seralize linking. Linking is a memory-intensive -# process so running parallel links can often lead to thrashing. To disable -# the serialization, override LINK via an envrionment variable as follows: -# -# export LINK=g++ -# -# This will allow make to invoke N linker processes as specified in -jN. -LINK ?= flock $(builddir)/linker.lock $(CXX.target) +LINK ?= $(CXX.target) # TODO(evan): move all cross-compilation logic to gyp-time so we don't need # to replicate this environment fallback in make as well. CC.host ?= gcc -CFLAGS.host ?= +CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) CXX.host ?= g++ -CXXFLAGS.host ?= +CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) LINK.host ?= $(CXX.host) LDFLAGS.host ?= AR.host ?= ar @@ -134,6 +126,34 @@ cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $ quiet_cmd_cxx = CXX($(TOOLSET)) $@ cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_objc = CXX($(TOOLSET)) $@ +cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< + +quiet_cmd_objcxx = CXX($(TOOLSET)) $@ +cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# Commands for precompiled header files. +quiet_cmd_pch_c = CXX($(TOOLSET)) $@ +cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ +cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_m = CXX($(TOOLSET)) $@ +cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< +quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ +cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# gyp-mac-tool is written next to the root Makefile by gyp. +# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd +# already. +quiet_cmd_mac_tool = MACTOOL $(4) $< +cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" + +quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ +cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) + +quiet_cmd_infoplist = INFOPLIST $@ +cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" + quiet_cmd_touch = TOUCH $@ cmd_touch = touch $@ @@ -141,39 +161,17 @@ quiet_cmd_copy = COPY $@ # send stderr to /dev/null to ignore messages when linking directories. cmd_copy = rm -rf "$@" && cp -af "$<" "$@" -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) +quiet_cmd_alink = LIBTOOL-STATIC $@ +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. 
quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) # Define an escape_quotes function to escape single quotes. 
@@ -238,7 +236,7 @@ define do_cmd $(if $(or $(command_changed),$(prereq_changed)), @$(call exact_echo, $($(quiet)cmd_$(1))) @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word 1,$(cmd_$1))), + $(if $(findstring flock,$(word 2,$(cmd_$1))), @$(cmd_$(1)) @echo " $(quiet_cmd_$(1)): Finished", @$(cmd_$(1)) @@ -276,6 +274,10 @@ $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD @@ -290,6 +292,10 @@ $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD @@ -303,6 +309,10 @@ $(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD @$(call do_cmd,cxx,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD @$(call do_cmd,cc,1) $(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD @@ -316,8 +326,8 @@ ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ endif quiet_cmd_regen_makefile = ACTION Regenerating $@ -cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/home/vagrant/lab4/node_modules/mongodb/node_modules/bson/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/home/vagrant/.node-gyp/0.10.24/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/home/vagrant/.node-gyp/0.10.24" "-Dmodule_root_dir=/home/vagrant/lab4/node_modules/mongodb/node_modules/bson" binding.gyp -Makefile: $(srcdir)/../../../../../.node-gyp/0.10.24/common.gypi $(srcdir)/../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp +cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mquery/node_modules/bson/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/common.gypi "--depth=." "-Goutput_dir=." 
"--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/tamtranht02/Library/Caches/node-gyp/14.15.4" "-Dnode_gyp_dir=/usr/local/lib/node_modules/npm/node_modules/node-gyp" "-Dnode_lib_file=/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/<(target_arch)/node.lib" "-Dmodule_root_dir=/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mquery/node_modules/bson" "-Dnode_engine=v8" binding.gyp +Makefile: $(srcdir)/../../../../../../../Library/Caches/node-gyp/14.15.4/include/node/common.gypi $(srcdir)/../../../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(call do_cmd,regen_makefile) # "all" is a concatenation of the "all" targets from all the included diff --git a/node_modules/mquery/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw b/node_modules/mquery/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw new file mode 100644 index 000000000..4ef18ebe6 --- /dev/null +++ b/node_modules/mquery/node_modules/bson/build/Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw @@ -0,0 +1,12 @@ +Release/obj.target/bson/ext/bson.o: ../ext/bson.cc \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/cppgc/common.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8config.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-internal.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/v8-platform.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_version.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_buffer.h \ + ../ext/bson.h \ + /Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node/node_object_wrap.h diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/binding.Makefile b/node_modules/mquery/node_modules/bson/build/binding.Makefile similarity index 69% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/binding.Makefile rename to node_modules/mquery/node_modules/bson/build/binding.Makefile index 90bf8247b..d7430e6d7 100644 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build/binding.Makefile +++ b/node_modules/mquery/node_modules/bson/build/binding.Makefile @@ -1,6 +1,6 @@ # This file is generated by gyp; do not edit. -export builddir_name ?= build/./. +export builddir_name ?= ./build/. .PHONY: all all: $(MAKE) bson diff --git a/node_modules/mquery/node_modules/bson/build/bson.target.mk b/node_modules/mquery/node_modules/bson/build/bson.target.mk new file mode 100644 index 000000000..b58e04ad3 --- /dev/null +++ b/node_modules/mquery/node_modules/bson/build/bson.target.mk @@ -0,0 +1,188 @@ +# This file is generated by gyp; do not edit. 
+ +TOOLSET := target +TARGET := bson +DEFS_Debug := \ + '-DNODE_GYP_MODULE_NAME=bson' \ + '-DUSING_UV_SHARED=1' \ + '-DUSING_V8_SHARED=1' \ + '-DV8_DEPRECATION_WARNINGS=1' \ + '-DV8_DEPRECATION_WARNINGS' \ + '-DV8_IMMINENT_DEPRECATION_WARNINGS' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DOPENSSL_NO_PINSHARED' \ + '-DOPENSSL_THREADS' \ + '-DBUILDING_NODE_EXTENSION' \ + '-DDEBUG' \ + '-D_DEBUG' \ + '-DV8_ENABLE_CHECKS' + +# Flags passed to all source files. +CFLAGS_Debug := \ + -O0 \ + -gdwarf-2 \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Debug := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Debug := \ + -std=gnu++1y \ + -stdlib=libc++ \ + -fno-rtti \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Debug := + +# Flags passed to only ObjC++ files. +CFLAGS_OBJCC_Debug := + +INCS_Debug := \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/src \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/config \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/openssl/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/uv/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/zlib \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/v8/include + +DEFS_Release := \ + '-DNODE_GYP_MODULE_NAME=bson' \ + '-DUSING_UV_SHARED=1' \ + '-DUSING_V8_SHARED=1' \ + '-DV8_DEPRECATION_WARNINGS=1' \ + '-DV8_DEPRECATION_WARNINGS' \ + '-DV8_IMMINENT_DEPRECATION_WARNINGS' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DOPENSSL_NO_PINSHARED' \ + '-DOPENSSL_THREADS' \ + '-DBUILDING_NODE_EXTENSION' + +# Flags passed to all source files. +CFLAGS_Release := \ + -O3 \ + -gdwarf-2 \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Release := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Release := \ + -std=gnu++1y \ + -stdlib=libc++ \ + -fno-rtti \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Release := + +# Flags passed to only ObjC++ files. +CFLAGS_OBJCC_Release := + +INCS_Release := \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/include/node \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/src \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/config \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/openssl/openssl/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/uv/include \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/zlib \ + -I/Users/tamtranht02/Library/Caches/node-gyp/14.15.4/deps/v8/include + +OBJS := \ + $(obj).target/$(TARGET)/ext/bson.o + +# Add to the list of files we specially track dependencies for. +all_deps += $(OBJS) + +# CFLAGS et al overrides must be target-local. +# See "Target-specific Variable Values" in the GNU Make manual. 
+$(OBJS): TOOLSET := $(TOOLSET) +$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) +$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) + +# Suffix rules, putting all outputs into $(obj). + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +# Try building from generated source, too. + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +# End of this set of suffix rules +### Rules for final target. +LDFLAGS_Debug := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -L$(builddir) \ + -stdlib=libc++ + +LIBTOOLFLAGS_Debug := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first + +LDFLAGS_Release := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.13 \ + -arch x86_64 \ + -L$(builddir) \ + -stdlib=libc++ + +LIBTOOLFLAGS_Release := \ + -undefined dynamic_lookup \ + -Wl,-no_pie \ + -Wl,-search_paths_first + +LIBS := + +$(builddir)/bson.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) +$(builddir)/bson.node: LIBS := $(LIBS) +$(builddir)/bson.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) +$(builddir)/bson.node: TOOLSET := $(TOOLSET) +$(builddir)/bson.node: $(OBJS) FORCE_DO_CMD + $(call do_cmd,solink_module) + +all_deps += $(builddir)/bson.node +# Add target alias +.PHONY: bson +bson: $(builddir)/bson.node + +# Short alias for building this executable. +.PHONY: bson.node +bson.node: $(builddir)/bson.node + +# Add executable to "all" target. +.PHONY: all +all: $(builddir)/bson.node + diff --git a/node_modules/mquery/node_modules/bson/build/config.gypi b/node_modules/mquery/node_modules/bson/build/config.gypi new file mode 100644 index 000000000..6a09d0a25 --- /dev/null +++ b/node_modules/mquery/node_modules/bson/build/config.gypi @@ -0,0 +1,203 @@ +# Do not edit. 
File was generated by node-gyp's "configure" step +{ + "target_defaults": { + "cflags": [], + "default_configuration": "Release", + "defines": [], + "include_dirs": [], + "libraries": [] + }, + "variables": { + "asan": 0, + "build_v8_with_gn": "false", + "coverage": "false", + "dcheck_always_on": 0, + "debug_nghttp2": "false", + "debug_node": "false", + "enable_lto": "false", + "enable_pgo_generate": "false", + "enable_pgo_use": "false", + "error_on_warn": "false", + "force_dynamic_crt": 0, + "host_arch": "x64", + "icu_data_in": "../../deps/icu-tmp/icudt67l.dat", + "icu_endianness": "l", + "icu_gyp_path": "tools/icu/icu-generic.gyp", + "icu_path": "deps/icu-small", + "icu_small": "false", + "icu_ver_major": "67", + "is_debug": 0, + "llvm_version": "11.0", + "napi_build_version": "7", + "node_byteorder": "little", + "node_debug_lib": "false", + "node_enable_d8": "false", + "node_install_npm": "true", + "node_module_version": 83, + "node_no_browser_globals": "false", + "node_prefix": "/usr/local", + "node_release_urlbase": "https://nodejs.org/download/release/", + "node_shared": "false", + "node_shared_brotli": "false", + "node_shared_cares": "false", + "node_shared_http_parser": "false", + "node_shared_libuv": "false", + "node_shared_nghttp2": "false", + "node_shared_openssl": "false", + "node_shared_zlib": "false", + "node_tag": "", + "node_target_type": "executable", + "node_use_bundled_v8": "true", + "node_use_dtrace": "true", + "node_use_etw": "false", + "node_use_node_code_cache": "true", + "node_use_node_snapshot": "true", + "node_use_openssl": "true", + "node_use_v8_platform": "true", + "node_with_ltcg": "false", + "node_without_node_options": "false", + "openssl_fips": "", + "openssl_is_fips": "false", + "ossfuzz": "false", + "shlib_suffix": "83.dylib", + "target_arch": "x64", + "v8_enable_31bit_smis_on_64bit_arch": 0, + "v8_enable_gdbjit": 0, + "v8_enable_i18n_support": 1, + "v8_enable_inspector": 1, + "v8_enable_lite_mode": 0, + "v8_enable_object_print": 1, + "v8_enable_pointer_compression": 0, + "v8_no_strict_aliasing": 1, + "v8_optimized_debug": 1, + "v8_promise_internal_field_count": 1, + "v8_random_seed": 0, + "v8_trace_maps": 0, + "v8_use_siphash": 1, + "want_separate_host_toolset": 0, + "xcode_version": "11.0", + "nodedir": "/Users/tamtranht02/Library/Caches/node-gyp/14.15.4", + "standalone_static_library": 1, + "dry_run": "", + "legacy_bundling": "", + "save_dev": "", + "browser": "", + "commit_hooks": "true", + "only": "", + "viewer": "man", + "also": "", + "rollback": "true", + "sign_git_commit": "", + "audit": "true", + "usage": "", + "globalignorefile": "/usr/local/etc/npmignore", + "init_author_url": "", + "maxsockets": "50", + "shell": "/bin/bash", + "metrics_registry": "https://registry.npmjs.org/", + "parseable": "", + "shrinkwrap": "true", + "init_license": "ISC", + "timing": "", + "if_present": "", + "cache_max": "Infinity", + "init_author_email": "", + "sign_git_tag": "", + "cert": "", + "git_tag_version": "true", + "local_address": "", + "long": "", + "preid": "", + "fetch_retries": "2", + "registry": "https://registry.npmjs.org/", + "key": "", + "message": "%s", + "versions": "", + "globalconfig": "/usr/local/etc/npmrc", + "always_auth": "", + "logs_max": "10", + "prefer_online": "", + "cache_lock_retries": "10", + "global_style": "", + "update_notifier": "true", + "audit_level": "low", + "heading": "npm", + "fetch_retry_mintimeout": "10000", + "offline": "", + "read_only": "", + "searchlimit": "20", + "access": "", + "json": "", + "allow_same_version": "", + 
"description": "true", + "engine_strict": "", + "https_proxy": "", + "init_module": "/Users/tamtranht02/.npm-init.js", + "userconfig": "/Users/tamtranht02/.npmrc", + "cidr": "", + "node_version": "14.15.4", + "user": "", + "auth_type": "legacy", + "editor": "vi", + "ignore_prepublish": "", + "save": "true", + "script_shell": "", + "tag": "latest", + "before": "", + "global": "", + "progress": "true", + "ham_it_up": "", + "optional": "true", + "searchstaleness": "900", + "bin_links": "true", + "force": "", + "save_prod": "", + "searchopts": "", + "depth": "Infinity", + "node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js", + "rebuild_bundle": "true", + "sso_poll_frequency": "500", + "unicode": "true", + "fetch_retry_maxtimeout": "60000", + "ca": "", + "save_prefix": "^", + "scripts_prepend_node_path": "warn-only", + "sso_type": "oauth", + "strict_ssl": "true", + "tag_version_prefix": "v", + "dev": "", + "fetch_retry_factor": "10", + "group": "20", + "save_exact": "", + "cache_lock_stale": "60000", + "prefer_offline": "", + "version": "", + "cache_min": "10", + "otp": "", + "cache": "/Users/tamtranht02/.npm", + "searchexclude": "", + "color": "true", + "package_lock": "true", + "fund": "true", + "package_lock_only": "", + "save_optional": "", + "user_agent": "npm/6.14.10 node/v14.15.4 darwin x64", + "ignore_scripts": "", + "cache_lock_wait": "10000", + "production": "", + "save_bundle": "", + "send_metrics": "", + "init_version": "1.0.0", + "node_options": "", + "umask": "0022", + "scope": "", + "git": "git", + "init_author_name": "", + "onload_script": "", + "tmp": "/var/folders/gz/y49xvbws5fl_4hbmp6h870vh0000gn/T", + "unsafe_perm": "true", + "format_package_lock": "true", + "link": "", + "prefix": "/usr/local" + } +} diff --git a/node_modules/mquery/node_modules/bson/build/gyp-mac-tool b/node_modules/mquery/node_modules/bson/build/gyp-mac-tool new file mode 100755 index 000000000..033b4e538 --- /dev/null +++ b/node_modules/mquery/node_modules/bson/build/gyp-mac-tool @@ -0,0 +1,615 @@ +#!/usr/bin/env python +# Generated by gyp. Do not edit. +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions to perform Xcode-style build steps. + +These functions are executed via gyp-mac-tool when using the Makefile generator. +""" + +from __future__ import print_function + +import fcntl +import fnmatch +import glob +import json +import os +import plistlib +import re +import shutil +import string +import subprocess +import sys +import tempfile + +PY3 = bytes != str + + +def main(args): + executor = MacTool() + exit_code = executor.Dispatch(args) + if exit_code is not None: + sys.exit(exit_code) + + +class MacTool(object): + """This class performs all the Mac tooling steps. 
The methods can either be + executed directly, or dispatched from an argument list.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + return getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace('-', '') + + def ExecCopyBundleResource(self, source, dest, convert_to_binary): + """Copies a resource file to the bundle/Resources directory, performing any + necessary compilation on each resource.""" + extension = os.path.splitext(source)[1].lower() + if os.path.isdir(source): + # Copy tree. + # TODO(thakis): This copies file attributes like mtime, while the + # single-file branch below doesn't. This should probably be changed to + # be consistent with the single-file branch. + if os.path.exists(dest): + shutil.rmtree(dest) + shutil.copytree(source, dest) + elif extension == '.xib': + return self._CopyXIBFile(source, dest) + elif extension == '.storyboard': + return self._CopyXIBFile(source, dest) + elif extension == '.strings': + self._CopyStringsFile(source, dest, convert_to_binary) + else: + shutil.copy(source, dest) + + def _CopyXIBFile(self, source, dest): + """Compiles a XIB file with ibtool into a binary plist in the bundle.""" + + # ibtool sometimes crashes with relative paths. See crbug.com/314728. + base = os.path.dirname(os.path.realpath(__file__)) + if os.path.relpath(source): + source = os.path.join(base, source) + if os.path.relpath(dest): + dest = os.path.join(base, dest) + + args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', + '--output-format', 'human-readable-text', '--compile', dest, source] + ibtool_section_re = re.compile(r'/\*.*\*/') + ibtool_re = re.compile(r'.*note:.*is clipping its content') + ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) + current_section_header = None + for line in ibtoolout.stdout: + if ibtool_section_re.match(line): + current_section_header = line + elif not ibtool_re.match(line): + if current_section_header: + sys.stdout.write(current_section_header) + current_section_header = None + sys.stdout.write(line) + return ibtoolout.returncode + + def _ConvertToBinary(self, dest): + subprocess.check_call([ + 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) + + def _CopyStringsFile(self, source, dest, convert_to_binary): + """Copies a .strings file using iconv to reconvert the input into UTF-16.""" + input_code = self._DetectInputEncoding(source) or "UTF-8" + + # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call + # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints + # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing + # semicolon in dictionary. + # on invalid files. Do the same kind of validation. + import CoreFoundation + s = open(source, 'rb').read() + d = CoreFoundation.CFDataCreate(None, s, len(s)) + _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) + if error: + return + + fp = open(dest, 'wb') + fp.write(s.decode(input_code).encode('UTF-16')) + fp.close() + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _DetectInputEncoding(self, file_name): + """Reads the first few bytes from file_name and tries to guess the text + encoding. 
Returns None as a guess if it can't detect it.""" + fp = open(file_name, 'rb') + try: + header = fp.read(3) + except Exception: + fp.close() + return None + fp.close() + if header.startswith("\xFE\xFF"): + return "UTF-16" + elif header.startswith("\xFF\xFE"): + return "UTF-16" + elif header.startswith("\xEF\xBB\xBF"): + return "UTF-8" + else: + return None + + def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): + """Copies the |source| Info.plist to the destination directory |dest|.""" + # Read the source Info.plist into memory. + fd = open(source, 'r') + lines = fd.read() + fd.close() + + # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). + plist = plistlib.readPlistFromString(lines) + if keys: + plist = dict(plist.items() + json.loads(keys[0]).items()) + lines = plistlib.writePlistToString(plist) + + # Go through all the environment variables and replace them as variables in + # the file. + IDENT_RE = re.compile(r'[/\s]') + for key in os.environ: + if key.startswith('_'): + continue + evar = '${%s}' % key + evalue = os.environ[key] + lines = string.replace(lines, evar, evalue) + + # Xcode supports various suffices on environment variables, which are + # all undocumented. :rfc1034identifier is used in the standard project + # template these days, and :identifier was used earlier. They are used to + # convert non-url characters into things that look like valid urls -- + # except that the replacement character for :identifier, '_' isn't valid + # in a URL either -- oops, hence :rfc1034identifier was born. + evar = '${%s:identifier}' % key + evalue = IDENT_RE.sub('_', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + evar = '${%s:rfc1034identifier}' % key + evalue = IDENT_RE.sub('-', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + # Remove any keys with values that haven't been replaced. + lines = lines.split('\n') + for i in range(len(lines)): + if lines[i].strip().startswith("${"): + lines[i] = None + lines[i - 1] = None + lines = '\n'.join(filter(lambda x: x is not None, lines)) + + # Write out the file with variables replaced. + fd = open(dest, 'w') + fd.write(lines) + fd.close() + + # Now write out PkgInfo file now that the Info.plist file has been + # "compiled". + self._WritePkgInfo(dest) + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _WritePkgInfo(self, info_plist): + """This writes the PkgInfo file from the data stored in Info.plist.""" + plist = plistlib.readPlist(info_plist) + if not plist: + return + + # Only create PkgInfo for executable types. + package_type = plist['CFBundlePackageType'] + if package_type != 'APPL': + return + + # The format of PkgInfo is eight characters, representing the bundle type + # and bundle signature, each four characters. If that is missing, four + # '?' characters are used instead. + signature_code = plist.get('CFBundleSignature', '????') + if len(signature_code) != 4: # Wrong length resets everything, too. + signature_code = '?' * 4 + + dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') + fp = open(dest, 'w') + fp.write('%s%s' % (package_type, signature_code)) + fp.close() + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. 
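    # Editorial note: the descriptor opened below is not closed explicitly, so
    # the exclusive flock stays held for the whole subprocess.call() and is
    # only released when this process exits and the fd is closed.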
+ fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) + fcntl.flock(fd, fcntl.LOCK_EX) + return subprocess.call(cmd_list) + + def ExecFilterLibtool(self, *cmd_list): + """Calls libtool and filters out '/path/to/libtool: file: foo.o has no + symbols'.""" + libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') + libtool_re5 = re.compile( + r'^.*libtool: warning for library: ' + + r'.* the table of contents is empty ' + + r'\(no object file members in the library define global symbols\)$') + env = os.environ.copy() + # Ref: + # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c + # The problem with this flag is that it resets the file mtime on the file to + # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. + env['ZERO_AR_DATE'] = '1' + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + _, err = libtoolout.communicate() + if PY3: + err = err.decode('utf-8') + for line in err.splitlines(): + if not libtool_re.match(line) and not libtool_re5.match(line): + print(line, file=sys.stderr) + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky. + if not libtoolout.returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): + os.utime(cmd_list[i+1], None) + break + return libtoolout.returncode + + def ExecPackageFramework(self, framework, version): + """Takes a path to Something.framework and the Current version of that and + sets up all the symlinks.""" + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + + CURRENT = 'Current' + RESOURCES = 'Resources' + VERSIONS = 'Versions' + + if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): + # Binary-less frameworks don't seem to contain symlinks (see e.g. + # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). + return + + # Move into the framework directory to set the symlinks correctly. + pwd = os.getcwd() + os.chdir(framework) + + # Set up the Current version. + self._Relink(version, os.path.join(VERSIONS, CURRENT)) + + # Set up the root symlinks. + self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) + self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) + + # Back to where we were before! + os.chdir(pwd) + + def _Relink(self, dest, link): + """Creates a symlink to |dest| named |link|. If |link| already exists, + it is overwritten.""" + if os.path.lexists(link): + os.remove(link) + os.symlink(dest, link) + + def ExecCompileXcassets(self, keys, *inputs): + """Compiles multiple .xcassets files into a single .car file. + + This invokes 'actool' to compile all the inputs .xcassets files. The + |keys| arguments is a json-encoded dictionary of extra arguments to + pass to 'actool' when the asset catalogs contains an application icon + or a launch image. + + Note that 'actool' does not create the Assets.car file if the asset + catalogs does not contains imageset. 
+ """ + command_line = [ + 'xcrun', 'actool', '--output-format', 'human-readable-text', + '--compress-pngs', '--notices', '--warnings', '--errors', + ] + is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ + if is_iphone_target: + platform = os.environ['CONFIGURATION'].split('-')[-1] + if platform not in ('iphoneos', 'iphonesimulator'): + platform = 'iphonesimulator' + command_line.extend([ + '--platform', platform, '--target-device', 'iphone', + '--target-device', 'ipad', '--minimum-deployment-target', + os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', + os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), + ]) + else: + command_line.extend([ + '--platform', 'macosx', '--target-device', 'mac', + '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], + '--compile', + os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), + ]) + if keys: + keys = json.loads(keys) + for key, value in keys.items(): + arg_name = '--' + key + if isinstance(value, bool): + if value: + command_line.append(arg_name) + elif isinstance(value, list): + for v in value: + command_line.append(arg_name) + command_line.append(str(v)) + else: + command_line.append(arg_name) + command_line.append(str(value)) + # Note: actool crashes if inputs path are relative, so use os.path.abspath + # to get absolute path name for inputs. + command_line.extend(map(os.path.abspath, inputs)) + subprocess.check_call(command_line) + + def ExecMergeInfoPlist(self, output, *inputs): + """Merge multiple .plist files into a single .plist file.""" + merged_plist = {} + for path in inputs: + plist = self._LoadPlistMaybeBinary(path) + self._MergePlist(merged_plist, plist) + plistlib.writePlist(merged_plist, output) + + def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + """Code sign a bundle. + + This function tries to code sign an iOS bundle, following the same + algorithm as Xcode: + 1. copy ResourceRules.plist from the user or the SDK into the bundle, + 2. pick the provisioning profile that best match the bundle identifier, + and copy it into the bundle as embedded.mobileprovision, + 3. copy Entitlements.plist from user or SDK next to the bundle, + 4. code sign the bundle. + """ + resource_rules_path = self._InstallResourceRules(resource_rules) + substitutions, overrides = self._InstallProvisioningProfile( + provisioning, self._GetCFBundleIdentifier()) + entitlements_path = self._InstallEntitlements( + entitlements, substitutions, overrides) + subprocess.check_call([ + 'codesign', '--force', '--sign', key, '--resource-rules', + resource_rules_path, '--entitlements', entitlements_path, + os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['FULL_PRODUCT_NAME'])]) + + def _InstallResourceRules(self, resource_rules): + """Installs ResourceRules.plist from user or SDK into the bundle. + + Args: + resource_rules: string, optional, path to the ResourceRules.plist file + to use, default to "${SDKROOT}/ResourceRules.plist" + + Returns: + Path to the copy of ResourceRules.plist into the bundle. + """ + source_path = resource_rules + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'ResourceRules.plist') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], 'ResourceRules.plist') + shutil.copy2(source_path, target_path) + return target_path + + def _InstallProvisioningProfile(self, profile, bundle_identifier): + """Installs embedded.mobileprovision into the bundle. 
+ + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple containing two dictionary: variables substitutions and values + to overrides when generating the entitlements file. + """ + source_path, provisioning_data, team_id = self._FindProvisioningProfile( + profile, bundle_identifier) + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'embedded.mobileprovision') + shutil.copy2(source_path, target_path) + substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') + return substitutions, provisioning_data['Entitlements'] + + def _FindProvisioningProfile(self, profile, bundle_identifier): + """Finds the .mobileprovision file to use for signing the bundle. + + Checks all the installed provisioning profiles (or if the user specified + the PROVISIONING_PROFILE variable, only consult it) and select the most + specific that correspond to the bundle identifier. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple of the path to the selected provisioning profile, the data of + the embedded plist in the provisioning profile and the team identifier + to use for code signing. + + Raises: + SystemExit: if no .mobileprovision can be used to sign the bundle. + """ + profiles_dir = os.path.join( + os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') + if not os.path.isdir(profiles_dir): + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + sys.exit(1) + provisioning_profiles = None + if profile: + profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') + if os.path.exists(profile_path): + provisioning_profiles = [profile_path] + if not provisioning_profiles: + provisioning_profiles = glob.glob( + os.path.join(profiles_dir, '*.mobileprovision')) + valid_provisioning_profiles = {} + for profile_path in provisioning_profiles: + profile_data = self._LoadProvisioningProfile(profile_path) + app_id_pattern = profile_data.get( + 'Entitlements', {}).get('application-identifier', '') + for team_identifier in profile_data.get('TeamIdentifier', []): + app_id = '%s.%s' % (team_identifier, bundle_identifier) + if fnmatch.fnmatch(app_id, app_id_pattern): + valid_provisioning_profiles[app_id_pattern] = ( + profile_path, profile_data, team_identifier) + if not valid_provisioning_profiles: + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) + sys.exit(1) + # If the user has multiple provisioning profiles installed that can be + # used for ${bundle_identifier}, pick the most specific one (ie. the + # provisioning profile whose pattern is the longest). + selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) + return valid_provisioning_profiles[selected_key] + + def _LoadProvisioningProfile(self, profile_path): + """Extracts the plist embedded in a provisioning profile. + + Args: + profile_path: string, path to the .mobileprovision file + + Returns: + Content of the plist embedded in the provisioning profile as a dictionary. 
+ """ + with tempfile.NamedTemporaryFile() as temp: + subprocess.check_call([ + 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) + return self._LoadPlistMaybeBinary(temp.name) + + def _MergePlist(self, merged_plist, plist): + """Merge |plist| into |merged_plist|.""" + for key, value in plist.items(): + if isinstance(value, dict): + merged_value = merged_plist.get(key, {}) + if isinstance(merged_value, dict): + self._MergePlist(merged_value, value) + merged_plist[key] = merged_value + else: + merged_plist[key] = value + else: + merged_plist[key] = value + + def _LoadPlistMaybeBinary(self, plist_path): + """Loads into a memory a plist possibly encoded in binary format. + + This is a wrapper around plistlib.readPlist that tries to convert the + plist to the XML format if it can't be parsed (assuming that it is in + the binary format). + + Args: + plist_path: string, path to a plist file, in XML or binary format + + Returns: + Content of the plist as a dictionary. + """ + try: + # First, try to read the file using plistlib that only supports XML, + # and if an exception is raised, convert a temporary copy to XML and + # load that copy. + return plistlib.readPlist(plist_path) + except: + pass + with tempfile.NamedTemporaryFile() as temp: + shutil.copy2(plist_path, temp.name) + subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) + return plistlib.readPlist(temp.name) + + def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): + """Constructs a dictionary of variable substitutions for Entitlements.plist. + + Args: + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + app_identifier_prefix: string, value for AppIdentifierPrefix + + Returns: + Dictionary of substitutions to apply when generating Entitlements.plist. + """ + return { + 'CFBundleIdentifier': bundle_identifier, + 'AppIdentifierPrefix': app_identifier_prefix, + } + + def _GetCFBundleIdentifier(self): + """Extracts CFBundleIdentifier value from Info.plist in the bundle. + + Returns: + Value of CFBundleIdentifier in the Info.plist located in the bundle. + """ + info_plist_path = os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['INFOPLIST_PATH']) + info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) + return info_plist_data['CFBundleIdentifier'] + + def _InstallEntitlements(self, entitlements, substitutions, overrides): + """Generates and install the ${BundleName}.xcent entitlements file. + + Expands variables "$(variable)" pattern in the source entitlements file, + add extra entitlements defined in the .mobileprovision file and the copy + the generated plist to "${BundlePath}.xcent". + + Args: + entitlements: string, optional, path to the Entitlements.plist template + to use, defaults to "${SDKROOT}/Entitlements.plist" + substitutions: dictionary, variable substitutions + overrides: dictionary, values to add to the entitlements + + Returns: + Path to the generated entitlements file. 
+ """ + source_path = entitlements + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['PRODUCT_NAME'] + '.xcent') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], + 'Entitlements.plist') + shutil.copy2(source_path, target_path) + data = self._LoadPlistMaybeBinary(target_path) + data = self._ExpandVariables(data, substitutions) + if overrides: + for key in overrides: + if key not in data: + data[key] = overrides[key] + plistlib.writePlist(data, target_path) + return target_path + + def _ExpandVariables(self, data, substitutions): + """Expands variables "$(variable)" in data. + + Args: + data: object, can be either string, list or dictionary + substitutions: dictionary, variable substitutions to perform + + Returns: + Copy of data where each references to "$(variable)" has been replaced + by the corresponding value found in substitutions, or left intact if + the key was not found. + """ + if isinstance(data, str): + for key, value in substitutions.items(): + data = data.replace('$(%s)' % key, value) + return data + if isinstance(data, list): + return [self._ExpandVariables(v, substitutions) for v in data] + if isinstance(data, dict): + return {k: self._ExpandVariables(data[k], substitutions) for k in data} + return data + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build_browser.js b/node_modules/mquery/node_modules/bson/build_browser.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/build_browser.js rename to node_modules/mquery/node_modules/bson/build_browser.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/Makefile b/node_modules/mquery/node_modules/bson/ext/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/Makefile rename to node_modules/mquery/node_modules/bson/ext/Makefile diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/bson.cc b/node_modules/mquery/node_modules/bson/ext/bson.cc similarity index 97% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/bson.cc rename to node_modules/mquery/node_modules/bson/ext/bson.cc index 35c1709ce..b30180a66 100644 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/bson.cc +++ b/node_modules/mquery/node_modules/bson/ext/bson.cc @@ -1,1045 +1,1045 @@ -//=========================================================================== - -#include -#include -#include -#include -#include - -#ifdef __clang__ -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunused-parameter" -#endif - -#include - -// this and the above block must be around the v8.h header otherwise -// v8 is not happy -#ifdef __clang__ -#pragma clang diagnostic pop -#endif - -#include -#include -#include - -#include -#include -#include -#include - -#ifdef __sun - #include -#endif - -#include "bson.h" - -using namespace v8; -using namespace node; - -//=========================================================================== - -void DataStream::WriteObjectId(const Handle& object, const Handle& key) -{ - uint16_t buffer[12]; - object->Get(key)->ToString()->Write(buffer, 0, 12); - for(uint32_t i = 0; i < 12; ++i) - { - *p++ = (char) buffer[i]; - } -} - -void 
ThrowAllocatedStringException(size_t allocationSize, const char* format, ...) -{ - va_list args; - va_start(args, format); - char* string = (char*) malloc(allocationSize); - vsprintf(string, format, args); - va_end(args); - - throw string; -} - -void DataStream::CheckKey(const Local& keyName) -{ - size_t keyLength = keyName->Utf8Length(); - if(keyLength == 0) return; - - // Allocate space for the key, do not need to zero terminate as WriteUtf8 does it - char* keyStringBuffer = (char*) alloca(keyLength + 1); - // Write the key to the allocated buffer - keyName->WriteUtf8(keyStringBuffer); - // Check for the zero terminator - char* terminator = strchr(keyStringBuffer, 0x00); - - // If the location is not at the end of the string we've got an illegal 0x00 byte somewhere - if(terminator != &keyStringBuffer[keyLength]) { - ThrowAllocatedStringException(64+keyLength, "key %s must not contain null bytes", keyStringBuffer); - } - - if(keyStringBuffer[0] == '$') - { - ThrowAllocatedStringException(64+keyLength, "key %s must not start with '$'", keyStringBuffer); - } - - if(strchr(keyStringBuffer, '.') != NULL) - { - ThrowAllocatedStringException(64+keyLength, "key %s must not contain '.'", keyStringBuffer); - } -} - -template void BSONSerializer::SerializeDocument(const Handle& value) -{ - void* documentSize = this->BeginWriteSize(); - Local object = bson->GetSerializeObject(value); - - // Get the object property names - #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 6 - Local propertyNames = object->GetPropertyNames(); - #else - Local propertyNames = object->GetOwnPropertyNames(); - #endif - - // Length of the property - int propertyLength = propertyNames->Length(); - for(int i = 0; i < propertyLength; ++i) - { - const Local& propertyName = propertyNames->Get(i)->ToString(); - if(checkKeys) this->CheckKey(propertyName); - - const Local& propertyValue = object->Get(propertyName); - - if(serializeFunctions || !propertyValue->IsFunction()) - { - void* typeLocation = this->BeginWriteType(); - this->WriteString(propertyName); - SerializeValue(typeLocation, propertyValue); - } - } - - this->WriteByte(0); - this->CommitSize(documentSize); -} - -template void BSONSerializer::SerializeArray(const Handle& value) -{ - void* documentSize = this->BeginWriteSize(); - - Local array = Local::Cast(value->ToObject()); - uint32_t arrayLength = array->Length(); - - for(uint32_t i = 0; i < arrayLength; ++i) - { - void* typeLocation = this->BeginWriteType(); - this->WriteUInt32String(i); - SerializeValue(typeLocation, array->Get(i)); - } - - this->WriteByte(0); - this->CommitSize(documentSize); -} - -// This is templated so that we can use this function to both count the number of bytes, and to serialize those bytes. -// The template approach eliminates almost all of the inspection of values unless they're required (eg. string lengths) -// and ensures that there is always consistency between bytes counted and bytes written by design. 
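// Editorial sketch, not part of bson.cc: a self-contained illustration of the
// count-then-write pattern described above. CountStream, WriteStream and
// SerializeCString are hypothetical names; the point is that one templated
// serializer body is instantiated twice, once against a stream that only
// measures bytes and once against a stream that emits them, so the two passes
// cannot drift apart.
#include <cstddef>
#include <cstring>
#include <vector>

struct CountStream {                    // pass 1: only measures bytes
  std::size_t size = 0;
  void WriteData(const char*, std::size_t len) { size += len; }
};

struct WriteStream {                    // pass 2: actually emits bytes
  std::vector<char> buf;
  void WriteData(const char* p, std::size_t len) { buf.insert(buf.end(), p, p + len); }
};

template <typename Stream>              // one serializer body, two behaviours
void SerializeCString(Stream& s, const char* str) {
  s.WriteData(str, std::strlen(str) + 1);
}

int main() {
  CountStream counter;
  SerializeCString(counter, "hello");   // count first ...
  WriteStream writer;
  writer.buf.reserve(counter.size);     // ... allocate exactly once ...
  SerializeCString(writer, "hello");    // ... then write the same bytes
  return writer.buf.size() == counter.size ? 0 : 1;
}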
-template void BSONSerializer::SerializeValue(void* typeLocation, const Handle& value) -{ - if(value->IsNumber()) - { - double doubleValue = value->NumberValue(); - int intValue = (int) doubleValue; - if(intValue == doubleValue) - { - this->CommitType(typeLocation, BSON_TYPE_INT); - this->WriteInt32(intValue); - } - else - { - this->CommitType(typeLocation, BSON_TYPE_NUMBER); - this->WriteDouble(doubleValue); - } - } - else if(value->IsString()) - { - this->CommitType(typeLocation, BSON_TYPE_STRING); - this->WriteLengthPrefixedString(value->ToString()); - } - else if(value->IsBoolean()) - { - this->CommitType(typeLocation, BSON_TYPE_BOOLEAN); - this->WriteBool(value); - } - else if(value->IsArray()) - { - this->CommitType(typeLocation, BSON_TYPE_ARRAY); - SerializeArray(value); - } - else if(value->IsDate()) - { - this->CommitType(typeLocation, BSON_TYPE_DATE); - this->WriteInt64(value); - } - else if(value->IsRegExp()) - { - this->CommitType(typeLocation, BSON_TYPE_REGEXP); - const Handle& regExp = Handle::Cast(value); - - this->WriteString(regExp->GetSource()); - - int flags = regExp->GetFlags(); - if(flags & RegExp::kGlobal) this->WriteByte('s'); - if(flags & RegExp::kIgnoreCase) this->WriteByte('i'); - if(flags & RegExp::kMultiline) this->WriteByte('m'); - this->WriteByte(0); - } - else if(value->IsFunction()) - { - this->CommitType(typeLocation, BSON_TYPE_CODE); - this->WriteLengthPrefixedString(value->ToString()); - } - else if(value->IsObject()) - { - const Local& object = value->ToObject(); - if(object->Has(bson->_bsontypeString)) - { - const Local& constructorString = object->GetConstructorName(); - if(bson->longString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_LONG); - this->WriteInt32(object, bson->_longLowString); - this->WriteInt32(object, bson->_longHighString); - } - else if(bson->timestampString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_TIMESTAMP); - this->WriteInt32(object, bson->_longLowString); - this->WriteInt32(object, bson->_longHighString); - } - else if(bson->objectIDString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_OID); - this->WriteObjectId(object, bson->_objectIDidString); - } - else if(bson->binaryString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_BINARY); - - uint32_t length = object->Get(bson->_binaryPositionString)->Uint32Value(); - Local bufferObj = object->Get(bson->_binaryBufferString)->ToObject(); - - this->WriteInt32(length); - this->WriteByte(object, bson->_binarySubTypeString); // write subtype - // If type 0x02 write the array length aswell - if(object->Get(bson->_binarySubTypeString)->Int32Value() == 0x02) { - this->WriteInt32(length); - } - // Write the actual data - this->WriteData(Buffer::Data(bufferObj), length); - } - else if(bson->doubleString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_NUMBER); - this->WriteDouble(object, bson->_doubleValueString); - } - else if(bson->symbolString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_SYMBOL); - this->WriteLengthPrefixedString(object->Get(bson->_symbolValueString)->ToString()); - } - else if(bson->codeString->StrictEquals(constructorString)) - { - const Local& function = object->Get(bson->_codeCodeString)->ToString(); - const Local& scope = object->Get(bson->_codeScopeString)->ToObject(); - - // For Node < 0.6.X use the GetPropertyNames - #if NODE_MAJOR_VERSION == 0 && 
NODE_MINOR_VERSION < 6 - uint32_t propertyNameLength = scope->GetPropertyNames()->Length(); - #else - uint32_t propertyNameLength = scope->GetOwnPropertyNames()->Length(); - #endif - - if(propertyNameLength > 0) - { - this->CommitType(typeLocation, BSON_TYPE_CODE_W_SCOPE); - void* codeWidthScopeSize = this->BeginWriteSize(); - this->WriteLengthPrefixedString(function->ToString()); - SerializeDocument(scope); - this->CommitSize(codeWidthScopeSize); - } - else - { - this->CommitType(typeLocation, BSON_TYPE_CODE); - this->WriteLengthPrefixedString(function->ToString()); - } - } - else if(bson->dbrefString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_OBJECT); - - void* dbRefSize = this->BeginWriteSize(); - - void* refType = this->BeginWriteType(); - this->WriteData("$ref", 5); - SerializeValue(refType, object->Get(bson->_dbRefNamespaceString)); - - void* idType = this->BeginWriteType(); - this->WriteData("$id", 4); - SerializeValue(idType, object->Get(bson->_dbRefOidString)); - - const Local& refDbValue = object->Get(bson->_dbRefDbString); - if(!refDbValue->IsUndefined()) - { - void* dbType = this->BeginWriteType(); - this->WriteData("$db", 4); - SerializeValue(dbType, refDbValue); - } - - this->WriteByte(0); - this->CommitSize(dbRefSize); - } - else if(bson->minKeyString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_MIN_KEY); - } - else if(bson->maxKeyString->StrictEquals(constructorString)) - { - this->CommitType(typeLocation, BSON_TYPE_MAX_KEY); - } - } - else if(Buffer::HasInstance(value)) - { - this->CommitType(typeLocation, BSON_TYPE_BINARY); - - #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 - Buffer *buffer = ObjectWrap::Unwrap(value->ToObject()); - uint32_t length = object->length(); - #else - uint32_t length = Buffer::Length(value->ToObject()); - #endif - - this->WriteInt32(length); - this->WriteByte(0); - this->WriteData(Buffer::Data(value->ToObject()), length); - } - else - { - this->CommitType(typeLocation, BSON_TYPE_OBJECT); - SerializeDocument(value); - } - } - else if(value->IsNull() || value->IsUndefined()) - { - this->CommitType(typeLocation, BSON_TYPE_NULL); - } -} - -// Data points to start of element list, length is length of entire document including '\0' but excluding initial size -BSONDeserializer::BSONDeserializer(BSON* aBson, char* data, size_t length) -: bson(aBson), - pStart(data), - p(data), - pEnd(data + length - 1) -{ - if(*pEnd != '\0') ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); -} - -BSONDeserializer::BSONDeserializer(BSONDeserializer& parentSerializer, size_t length) -: bson(parentSerializer.bson), - pStart(parentSerializer.p), - p(parentSerializer.p), - pEnd(parentSerializer.p + length - 1) -{ - parentSerializer.p += length; - if(pEnd > parentSerializer.pEnd) ThrowAllocatedStringException(64, "Child document exceeds parent's bounds"); - if(*pEnd != '\0') ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); -} - -Local BSONDeserializer::ReadCString() -{ - char* start = p; - while(*p++) { } - return String::New(start, (int32_t) (p-start-1) ); -} - -int32_t BSONDeserializer::ReadRegexOptions() -{ - int32_t options = 0; - for(;;) - { - switch(*p++) - { - case '\0': return options; - case 's': options |= RegExp::kGlobal; break; - case 'i': options |= RegExp::kIgnoreCase; break; - case 'm': options |= RegExp::kMultiline; break; - } - } -} - -uint32_t BSONDeserializer::ReadIntegerString() -{ - uint32_t value = 0; - while(*p) - { - if(*p 
< '0' || *p > '9') ThrowAllocatedStringException(64, "Invalid key for array"); - value = value * 10 + *p++ - '0'; - } - ++p; - return value; -} - -Local BSONDeserializer::ReadString() -{ - uint32_t length = ReadUInt32(); - char* start = p; - p += length; - return String::New(start, length-1); -} - -Local BSONDeserializer::ReadObjectId() -{ - uint16_t objectId[12]; - for(size_t i = 0; i < 12; ++i) - { - objectId[i] = *reinterpret_cast(p++); - } - return String::New(objectId, 12); -} - -Handle BSONDeserializer::DeserializeDocument(bool promoteLongs) -{ - uint32_t length = ReadUInt32(); - if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Document is less than 5 bytes"); - - BSONDeserializer documentDeserializer(*this, length-4); - return documentDeserializer.DeserializeDocumentInternal(promoteLongs); -} - -Handle BSONDeserializer::DeserializeDocumentInternal(bool promoteLongs) -{ - Local returnObject = Object::New(); - - while(HasMoreData()) - { - BsonType type = (BsonType) ReadByte(); - const Local& name = ReadCString(); - const Handle& value = DeserializeValue(type, promoteLongs); - returnObject->ForceSet(name, value); - } - if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Document: Serialize consumed unexpected number of bytes"); - - // From JavaScript: - // if(object['$id'] != null) object = new DBRef(object['$ref'], object['$id'], object['$db']); - if(returnObject->Has(bson->_dbRefIdRefString)) - { - Local argv[] = { returnObject->Get(bson->_dbRefRefString), returnObject->Get(bson->_dbRefIdRefString), returnObject->Get(bson->_dbRefDbRefString) }; - return bson->dbrefConstructor->NewInstance(3, argv); - } - else - { - return returnObject; - } -} - -Handle BSONDeserializer::DeserializeArray(bool promoteLongs) -{ - uint32_t length = ReadUInt32(); - if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Array Document is less than 5 bytes"); - - BSONDeserializer documentDeserializer(*this, length-4); - return documentDeserializer.DeserializeArrayInternal(promoteLongs); -} - -Handle BSONDeserializer::DeserializeArrayInternal(bool promoteLongs) -{ - Local returnArray = Array::New(); - - while(HasMoreData()) - { - BsonType type = (BsonType) ReadByte(); - uint32_t index = ReadIntegerString(); - const Handle& value = DeserializeValue(type, promoteLongs); - returnArray->Set(index, value); - } - if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Array: Serialize consumed unexpected number of bytes"); - - return returnArray; -} - -Handle BSONDeserializer::DeserializeValue(BsonType type, bool promoteLongs) -{ - switch(type) - { - case BSON_TYPE_STRING: - return ReadString(); - - case BSON_TYPE_INT: - return Integer::New(ReadInt32()); - - case BSON_TYPE_NUMBER: - return Number::New(ReadDouble()); - - case BSON_TYPE_NULL: - return Null(); - - case BSON_TYPE_UNDEFINED: - return Undefined(); - - case BSON_TYPE_TIMESTAMP: - { - int32_t lowBits = ReadInt32(); - int32_t highBits = ReadInt32(); - Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; - return bson->timestampConstructor->NewInstance(2, argv); - } - - case BSON_TYPE_BOOLEAN: - return (ReadByte() != 0) ? 
True() : False(); - - case BSON_TYPE_REGEXP: - { - const Local& regex = ReadCString(); - int32_t options = ReadRegexOptions(); - return RegExp::New(regex, (RegExp::Flags) options); - } - - case BSON_TYPE_CODE: - { - const Local& code = ReadString(); - const Local& scope = Object::New(); - Local argv[] = { code, scope }; - return bson->codeConstructor->NewInstance(2, argv); - } - - case BSON_TYPE_CODE_W_SCOPE: - { - ReadUInt32(); - const Local& code = ReadString(); - const Handle& scope = DeserializeDocument(promoteLongs); - Local argv[] = { code, scope->ToObject() }; - return bson->codeConstructor->NewInstance(2, argv); - } - - case BSON_TYPE_OID: - { - Local argv[] = { ReadObjectId() }; - return bson->objectIDConstructor->NewInstance(1, argv); - } - - case BSON_TYPE_BINARY: - { - uint32_t length = ReadUInt32(); - uint32_t subType = ReadByte(); - if(subType == 0x02) { - length = ReadInt32(); - } - - Buffer* buffer = Buffer::New(p, length); - p += length; - - Handle argv[] = { buffer->handle_, Uint32::New(subType) }; - return bson->binaryConstructor->NewInstance(2, argv); - } - - case BSON_TYPE_LONG: - { - // Read 32 bit integers - int32_t lowBits = (int32_t) ReadInt32(); - int32_t highBits = (int32_t) ReadInt32(); - - // Promote long is enabled - if(promoteLongs) { - // If value is < 2^53 and >-2^53 - if((highBits < 0x200000 || (highBits == 0x200000 && lowBits == 0)) && highBits >= -0x200000) { - // Adjust the pointer and read as 64 bit value - p -= 8; - // Read the 64 bit value - int64_t finalValue = (int64_t) ReadInt64(); - return Number::New(finalValue); - } - } - - // Decode the Long value - Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; - return bson->longConstructor->NewInstance(2, argv); - } - - case BSON_TYPE_DATE: - return Date::New((double) ReadInt64()); - - case BSON_TYPE_ARRAY: - return DeserializeArray(promoteLongs); - - case BSON_TYPE_OBJECT: - return DeserializeDocument(promoteLongs); - - case BSON_TYPE_SYMBOL: - { - const Local& string = ReadString(); - Local argv[] = { string }; - return bson->symbolConstructor->NewInstance(1, argv); - } - - case BSON_TYPE_MIN_KEY: - return bson->minKeyConstructor->NewInstance(); - - case BSON_TYPE_MAX_KEY: - return bson->maxKeyConstructor->NewInstance(); - - default: - ThrowAllocatedStringException(64, "Unhandled BSON Type: %d", type); - } - - return v8::Null(); -} - - -static Handle VException(const char *msg) -{ - HandleScope scope; - return ThrowException(Exception::Error(String::New(msg))); -} - -Persistent BSON::constructor_template; - -BSON::BSON() : ObjectWrap() -{ - // Setup pre-allocated comparision objects - _bsontypeString = Persistent::New(String::New("_bsontype")); - _longLowString = Persistent::New(String::New("low_")); - _longHighString = Persistent::New(String::New("high_")); - _objectIDidString = Persistent::New(String::New("id")); - _binaryPositionString = Persistent::New(String::New("position")); - _binarySubTypeString = Persistent::New(String::New("sub_type")); - _binaryBufferString = Persistent::New(String::New("buffer")); - _doubleValueString = Persistent::New(String::New("value")); - _symbolValueString = Persistent::New(String::New("value")); - _dbRefRefString = Persistent::New(String::New("$ref")); - _dbRefIdRefString = Persistent::New(String::New("$id")); - _dbRefDbRefString = Persistent::New(String::New("$db")); - _dbRefNamespaceString = Persistent::New(String::New("namespace")); - _dbRefDbString = Persistent::New(String::New("db")); - _dbRefOidString = Persistent::New(String::New("oid")); - 
_codeCodeString = Persistent::New(String::New("code")); - _codeScopeString = Persistent::New(String::New("scope")); - _toBSONString = Persistent::New(String::New("toBSON")); - - longString = Persistent::New(String::New("Long")); - objectIDString = Persistent::New(String::New("ObjectID")); - binaryString = Persistent::New(String::New("Binary")); - codeString = Persistent::New(String::New("Code")); - dbrefString = Persistent::New(String::New("DBRef")); - symbolString = Persistent::New(String::New("Symbol")); - doubleString = Persistent::New(String::New("Double")); - timestampString = Persistent::New(String::New("Timestamp")); - minKeyString = Persistent::New(String::New("MinKey")); - maxKeyString = Persistent::New(String::New("MaxKey")); -} - -void BSON::Initialize(v8::Handle target) -{ - // Grab the scope of the call from Node - HandleScope scope; - // Define a new function template - Local t = FunctionTemplate::New(New); - constructor_template = Persistent::New(t); - constructor_template->InstanceTemplate()->SetInternalFieldCount(1); - constructor_template->SetClassName(String::NewSymbol("BSON")); - - // Instance methods - NODE_SET_PROTOTYPE_METHOD(constructor_template, "calculateObjectSize", CalculateObjectSize); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "serialize", BSONSerialize); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "serializeWithBufferAndIndex", SerializeWithBufferAndIndex); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "deserialize", BSONDeserialize); - NODE_SET_PROTOTYPE_METHOD(constructor_template, "deserializeStream", BSONDeserializeStream); - - target->ForceSet(String::NewSymbol("BSON"), constructor_template->GetFunction()); -} - -// Create a new instance of BSON and passing it the existing context -Handle BSON::New(const Arguments &args) -{ - HandleScope scope; - - // Check that we have an array - if(args.Length() == 1 && args[0]->IsArray()) - { - // Cast the array to a local reference - Local array = Local::Cast(args[0]); - - if(array->Length() > 0) - { - // Create a bson object instance and return it - BSON *bson = new BSON(); - - uint32_t foundClassesMask = 0; - - // Iterate over all entries to save the instantiate funtions - for(uint32_t i = 0; i < array->Length(); i++) { - // Let's get a reference to the function - Local func = Local::Cast(array->Get(i)); - Local functionName = func->GetName()->ToString(); - - // Save the functions making them persistant handles (they don't get collected) - if(functionName->StrictEquals(bson->longString)) { - bson->longConstructor = Persistent::New(func); - foundClassesMask |= 1; - } else if(functionName->StrictEquals(bson->objectIDString)) { - bson->objectIDConstructor = Persistent::New(func); - foundClassesMask |= 2; - } else if(functionName->StrictEquals(bson->binaryString)) { - bson->binaryConstructor = Persistent::New(func); - foundClassesMask |= 4; - } else if(functionName->StrictEquals(bson->codeString)) { - bson->codeConstructor = Persistent::New(func); - foundClassesMask |= 8; - } else if(functionName->StrictEquals(bson->dbrefString)) { - bson->dbrefConstructor = Persistent::New(func); - foundClassesMask |= 0x10; - } else if(functionName->StrictEquals(bson->symbolString)) { - bson->symbolConstructor = Persistent::New(func); - foundClassesMask |= 0x20; - } else if(functionName->StrictEquals(bson->doubleString)) { - bson->doubleConstructor = Persistent::New(func); - foundClassesMask |= 0x40; - } else if(functionName->StrictEquals(bson->timestampString)) { - bson->timestampConstructor = Persistent::New(func); 
- foundClassesMask |= 0x80; - } else if(functionName->StrictEquals(bson->minKeyString)) { - bson->minKeyConstructor = Persistent::New(func); - foundClassesMask |= 0x100; - } else if(functionName->StrictEquals(bson->maxKeyString)) { - bson->maxKeyConstructor = Persistent::New(func); - foundClassesMask |= 0x200; - } - } - - // Check if we have the right number of constructors otherwise throw an error - if(foundClassesMask != 0x3ff) { - delete bson; - return VException("Missing function constructor for either [Long/ObjectID/Binary/Code/DbRef/Symbol/Double/Timestamp/MinKey/MaxKey]"); - } else { - bson->Wrap(args.This()); - return args.This(); - } - } - else - { - return VException("No types passed in"); - } - } - else - { - return VException("Argument passed in must be an array of types"); - } -} - -//------------------------------------------------------------------------------------------------ -//------------------------------------------------------------------------------------------------ -//------------------------------------------------------------------------------------------------ -//------------------------------------------------------------------------------------------------ - -Handle BSON::BSONDeserialize(const Arguments &args) -{ - HandleScope scope; - - // Fail if the first argument is not a string or a buffer - if(args.Length() > 1 && !args[0]->IsString() && !Buffer::HasInstance(args[0])) - return VException("First Argument must be a Buffer or String."); - - // Promote longs - bool promoteLongs = true; - - // If we have an options object - if(args.Length() == 2 && args[1]->IsObject()) { - Local options = args[1]->ToObject(); - - if(options->Has(String::New("promoteLongs"))) { - promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); - } - } - - // Define pointer to data - Local obj = args[0]->ToObject(); - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - - // If we passed in a buffer, let's unpack it, otherwise let's unpack the string - if(Buffer::HasInstance(obj)) - { -#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 - Buffer *buffer = ObjectWrap::Unwrap(obj); - char* data = buffer->data(); - size_t length = buffer->length(); -#else - char* data = Buffer::Data(obj); - size_t length = Buffer::Length(obj); -#endif - - // Validate that we have at least 5 bytes - if(length < 5) return VException("corrupt bson message < 5 bytes long"); - - try - { - BSONDeserializer deserializer(bson, data, length); - // deserializer.promoteLongs = promoteLongs; - return deserializer.DeserializeDocument(promoteLongs); - } - catch(char* exception) - { - Handle error = VException(exception); - free(exception); - return error; - } - - } - else - { - // The length of the data for this encoding - ssize_t len = DecodeBytes(args[0], BINARY); - - // Validate that we have at least 5 bytes - if(len < 5) return VException("corrupt bson message < 5 bytes long"); - - // Let's define the buffer size - char* data = (char *)malloc(len); - DecodeWrite(data, len, args[0], BINARY); - - try - { - BSONDeserializer deserializer(bson, data, len); - // deserializer.promoteLongs = promoteLongs; - Handle result = deserializer.DeserializeDocument(promoteLongs); - free(data); - return result; - - } - catch(char* exception) - { - Handle error = VException(exception); - free(exception); - free(data); - return error; - } - } -} - -Local BSON::GetSerializeObject(const Handle& argValue) -{ - Local object = argValue->ToObject(); - if(object->Has(_toBSONString)) 
- { - const Local& toBSON = object->Get(_toBSONString); - if(!toBSON->IsFunction()) ThrowAllocatedStringException(64, "toBSON is not a function"); - - Local result = Local::Cast(toBSON)->Call(object, 0, NULL); - if(!result->IsObject()) ThrowAllocatedStringException(64, "toBSON function did not return an object"); - return result->ToObject(); - } - else - { - return object; - } -} - -Handle BSON::BSONSerialize(const Arguments &args) -{ - HandleScope scope; - - if(args.Length() == 1 && !args[0]->IsObject()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); - if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); - if(args.Length() == 3 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); - if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean() && !args[3]->IsBoolean()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); - if(args.Length() > 4) return VException("One, two, tree or four arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); - - // Check if we have an array as the object - if(args[0]->IsArray()) return VException("Only javascript objects supported"); - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - - // Calculate the total size of the document in binary form to ensure we only allocate memory once - // With serialize function - bool serializeFunctions = (args.Length() >= 4) && args[3]->BooleanValue(); - - char *serialized_object = NULL; - size_t object_size; - try - { - Local object = bson->GetSerializeObject(args[0]); - - BSONSerializer counter(bson, false, serializeFunctions); - counter.SerializeDocument(object); - object_size = counter.GetSerializeSize(); - - // Allocate the memory needed for the serialization - serialized_object = (char *)malloc(object_size); - - // Check if we have a boolean value - bool checkKeys = args.Length() >= 3 && args[1]->IsBoolean() && args[1]->BooleanValue(); - BSONSerializer data(bson, checkKeys, serializeFunctions, serialized_object); - data.SerializeDocument(object); - } - catch(char *err_msg) - { - free(serialized_object); - Handle error = VException(err_msg); - free(err_msg); - return error; - } - - // If we have 3 arguments - if(args.Length() == 3 || args.Length() == 4) - { - Buffer *buffer = Buffer::New(serialized_object, object_size); - free(serialized_object); - return scope.Close(buffer->handle_); - } - else - { - Local bin_value = Encode(serialized_object, object_size, BINARY)->ToString(); - free(serialized_object); - return bin_value; - } -} - -Handle BSON::CalculateObjectSize(const Arguments &args) -{ - HandleScope scope; - // Ensure we have a valid object - if(args.Length() == 1 && !args[0]->IsObject()) return VException("One argument required - [object]"); - if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return VException("Two arguments required - [object, boolean]"); - if(args.Length() > 3) return VException("One or two arguments required - [object] or [object, boolean]"); - - // Unpack the BSON parser instance - 
BSON *bson = ObjectWrap::Unwrap(args.This()); - bool serializeFunctions = (args.Length() >= 2) && args[1]->BooleanValue(); - BSONSerializer countSerializer(bson, false, serializeFunctions); - countSerializer.SerializeDocument(args[0]); - - // Return the object size - return scope.Close(Uint32::New((uint32_t) countSerializer.GetSerializeSize())); -} - -Handle BSON::SerializeWithBufferAndIndex(const Arguments &args) -{ - HandleScope scope; - - //BSON.serializeWithBufferAndIndex = function serializeWithBufferAndIndex(object, ->, buffer, index) { - // Ensure we have the correct values - if(args.Length() > 5) return VException("Four or five parameters required [object, boolean, Buffer, int] or [object, boolean, Buffer, int, boolean]"); - if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32()) return VException("Four parameters required [object, boolean, Buffer, int]"); - if(args.Length() == 5 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32() && !args[4]->IsBoolean()) return VException("Four parameters required [object, boolean, Buffer, int, boolean]"); - - uint32_t index; - size_t object_size; - - try - { - BSON *bson = ObjectWrap::Unwrap(args.This()); - - Local obj = args[2]->ToObject(); - char* data = Buffer::Data(obj); - size_t length = Buffer::Length(obj); - - index = args[3]->Uint32Value(); - bool checkKeys = args.Length() >= 4 && args[1]->IsBoolean() && args[1]->BooleanValue(); - bool serializeFunctions = (args.Length() == 5) && args[4]->BooleanValue(); - - BSONSerializer dataSerializer(bson, checkKeys, serializeFunctions, data+index); - dataSerializer.SerializeDocument(bson->GetSerializeObject(args[0])); - object_size = dataSerializer.GetSerializeSize(); - - if(object_size + index > length) return VException("Serious error - overflowed buffer!!"); - } - catch(char *exception) - { - Handle error = VException(exception); - free(exception); - return error; - } - - return scope.Close(Uint32::New((uint32_t) (index + object_size - 1))); -} - -Handle BSON::BSONDeserializeStream(const Arguments &args) -{ - HandleScope scope; - - // At least 3 arguments required - if(args.Length() < 5) return VException("Arguments required (Buffer(data), Number(index in data), Number(number of documents to deserialize), Array(results), Number(index in the array), Object(optional))"); - - // If the number of argumets equals 3 - if(args.Length() >= 5) - { - if(!Buffer::HasInstance(args[0])) return VException("First argument must be Buffer instance"); - if(!args[1]->IsUint32()) return VException("Second argument must be a positive index number"); - if(!args[2]->IsUint32()) return VException("Third argument must be a positive number of documents to deserialize"); - if(!args[3]->IsArray()) return VException("Fourth argument must be an array the size of documents to deserialize"); - if(!args[4]->IsUint32()) return VException("Sixth argument must be a positive index number"); - } - - // If we have 4 arguments - if(args.Length() == 6 && !args[5]->IsObject()) return VException("Fifth argument must be an object with options"); - - // Define pointer to data - Local obj = args[0]->ToObject(); - uint32_t numberOfDocuments = args[2]->Uint32Value(); - uint32_t index = args[1]->Uint32Value(); - uint32_t resultIndex = args[4]->Uint32Value(); - bool promoteLongs = true; - - // Check for the value promoteLongs in the options object - if(args.Length() == 6) { - Local options = args[5]->ToObject(); - - // 
Check if we have the promoteLong variable - if(options->Has(String::New("promoteLongs"))) { - promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); - } - } - - // Unpack the BSON parser instance - BSON *bson = ObjectWrap::Unwrap(args.This()); - - // Unpack the buffer variable -#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 - Buffer *buffer = ObjectWrap::Unwrap(obj); - char* data = buffer->data(); - size_t length = buffer->length(); -#else - char* data = Buffer::Data(obj); - size_t length = Buffer::Length(obj); -#endif - - // Fetch the documents - Local documents = args[3]->ToObject(); - - BSONDeserializer deserializer(bson, data+index, length-index); - for(uint32_t i = 0; i < numberOfDocuments; i++) - { - try - { - documents->Set(i + resultIndex, deserializer.DeserializeDocument(promoteLongs)); - } - catch (char* exception) - { - Handle error = VException(exception); - free(exception); - return error; - } - } - - // Return new index of parsing - return scope.Close(Uint32::New((uint32_t) (index + deserializer.GetSerializeSize()))); -} - -// Exporting function -extern "C" void init(Handle target) -{ - HandleScope scope; - BSON::Initialize(target); -} - -NODE_MODULE(bson, BSON::Initialize); +//=========================================================================== + +#include +#include +#include +#include +#include + +#ifdef __clang__ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wunused-parameter" +#endif + +#include + +// this and the above block must be around the v8.h header otherwise +// v8 is not happy +#ifdef __clang__ +#pragma clang diagnostic pop +#endif + +#include +#include +#include + +#include +#include +#include +#include + +#ifdef __sun + #include +#endif + +#include "bson.h" + +using namespace v8; +using namespace node; + +//=========================================================================== + +void DataStream::WriteObjectId(const Handle& object, const Handle& key) +{ + uint16_t buffer[12]; + object->Get(key)->ToString()->Write(buffer, 0, 12); + for(uint32_t i = 0; i < 12; ++i) + { + *p++ = (char) buffer[i]; + } +} + +void ThrowAllocatedStringException(size_t allocationSize, const char* format, ...) 
+{ + va_list args; + va_start(args, format); + char* string = (char*) malloc(allocationSize); + vsprintf(string, format, args); + va_end(args); + + throw string; +} + +void DataStream::CheckKey(const Local& keyName) +{ + size_t keyLength = keyName->Utf8Length(); + if(keyLength == 0) return; + + // Allocate space for the key, do not need to zero terminate as WriteUtf8 does it + char* keyStringBuffer = (char*) alloca(keyLength + 1); + // Write the key to the allocated buffer + keyName->WriteUtf8(keyStringBuffer); + // Check for the zero terminator + char* terminator = strchr(keyStringBuffer, 0x00); + + // If the location is not at the end of the string we've got an illegal 0x00 byte somewhere + if(terminator != &keyStringBuffer[keyLength]) { + ThrowAllocatedStringException(64+keyLength, "key %s must not contain null bytes", keyStringBuffer); + } + + if(keyStringBuffer[0] == '$') + { + ThrowAllocatedStringException(64+keyLength, "key %s must not start with '$'", keyStringBuffer); + } + + if(strchr(keyStringBuffer, '.') != NULL) + { + ThrowAllocatedStringException(64+keyLength, "key %s must not contain '.'", keyStringBuffer); + } +} + +template void BSONSerializer::SerializeDocument(const Handle& value) +{ + void* documentSize = this->BeginWriteSize(); + Local object = bson->GetSerializeObject(value); + + // Get the object property names + #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 6 + Local propertyNames = object->GetPropertyNames(); + #else + Local propertyNames = object->GetOwnPropertyNames(); + #endif + + // Length of the property + int propertyLength = propertyNames->Length(); + for(int i = 0; i < propertyLength; ++i) + { + const Local& propertyName = propertyNames->Get(i)->ToString(); + if(checkKeys) this->CheckKey(propertyName); + + const Local& propertyValue = object->Get(propertyName); + + if(serializeFunctions || !propertyValue->IsFunction()) + { + void* typeLocation = this->BeginWriteType(); + this->WriteString(propertyName); + SerializeValue(typeLocation, propertyValue); + } + } + + this->WriteByte(0); + this->CommitSize(documentSize); +} + +template void BSONSerializer::SerializeArray(const Handle& value) +{ + void* documentSize = this->BeginWriteSize(); + + Local array = Local::Cast(value->ToObject()); + uint32_t arrayLength = array->Length(); + + for(uint32_t i = 0; i < arrayLength; ++i) + { + void* typeLocation = this->BeginWriteType(); + this->WriteUInt32String(i); + SerializeValue(typeLocation, array->Get(i)); + } + + this->WriteByte(0); + this->CommitSize(documentSize); +} + +// This is templated so that we can use this function to both count the number of bytes, and to serialize those bytes. +// The template approach eliminates almost all of the inspection of values unless they're required (eg. string lengths) +// and ensures that there is always consistency between bytes counted and bytes written by design. 
+template void BSONSerializer::SerializeValue(void* typeLocation, const Handle& value) +{ + if(value->IsNumber()) + { + double doubleValue = value->NumberValue(); + int intValue = (int) doubleValue; + if(intValue == doubleValue) + { + this->CommitType(typeLocation, BSON_TYPE_INT); + this->WriteInt32(intValue); + } + else + { + this->CommitType(typeLocation, BSON_TYPE_NUMBER); + this->WriteDouble(doubleValue); + } + } + else if(value->IsString()) + { + this->CommitType(typeLocation, BSON_TYPE_STRING); + this->WriteLengthPrefixedString(value->ToString()); + } + else if(value->IsBoolean()) + { + this->CommitType(typeLocation, BSON_TYPE_BOOLEAN); + this->WriteBool(value); + } + else if(value->IsArray()) + { + this->CommitType(typeLocation, BSON_TYPE_ARRAY); + SerializeArray(value); + } + else if(value->IsDate()) + { + this->CommitType(typeLocation, BSON_TYPE_DATE); + this->WriteInt64(value); + } + else if(value->IsRegExp()) + { + this->CommitType(typeLocation, BSON_TYPE_REGEXP); + const Handle& regExp = Handle::Cast(value); + + this->WriteString(regExp->GetSource()); + + int flags = regExp->GetFlags(); + if(flags & RegExp::kGlobal) this->WriteByte('s'); + if(flags & RegExp::kIgnoreCase) this->WriteByte('i'); + if(flags & RegExp::kMultiline) this->WriteByte('m'); + this->WriteByte(0); + } + else if(value->IsFunction()) + { + this->CommitType(typeLocation, BSON_TYPE_CODE); + this->WriteLengthPrefixedString(value->ToString()); + } + else if(value->IsObject()) + { + const Local& object = value->ToObject(); + if(object->Has(bson->_bsontypeString)) + { + const Local& constructorString = object->GetConstructorName(); + if(bson->longString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_LONG); + this->WriteInt32(object, bson->_longLowString); + this->WriteInt32(object, bson->_longHighString); + } + else if(bson->timestampString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_TIMESTAMP); + this->WriteInt32(object, bson->_longLowString); + this->WriteInt32(object, bson->_longHighString); + } + else if(bson->objectIDString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_OID); + this->WriteObjectId(object, bson->_objectIDidString); + } + else if(bson->binaryString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_BINARY); + + uint32_t length = object->Get(bson->_binaryPositionString)->Uint32Value(); + Local bufferObj = object->Get(bson->_binaryBufferString)->ToObject(); + + this->WriteInt32(length); + this->WriteByte(object, bson->_binarySubTypeString); // write subtype + // If type 0x02 write the array length aswell + if(object->Get(bson->_binarySubTypeString)->Int32Value() == 0x02) { + this->WriteInt32(length); + } + // Write the actual data + this->WriteData(Buffer::Data(bufferObj), length); + } + else if(bson->doubleString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_NUMBER); + this->WriteDouble(object, bson->_doubleValueString); + } + else if(bson->symbolString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_SYMBOL); + this->WriteLengthPrefixedString(object->Get(bson->_symbolValueString)->ToString()); + } + else if(bson->codeString->StrictEquals(constructorString)) + { + const Local& function = object->Get(bson->_codeCodeString)->ToString(); + const Local& scope = object->Get(bson->_codeScopeString)->ToObject(); + + // For Node < 0.6.X use the GetPropertyNames + #if NODE_MAJOR_VERSION == 0 && 
NODE_MINOR_VERSION < 6 + uint32_t propertyNameLength = scope->GetPropertyNames()->Length(); + #else + uint32_t propertyNameLength = scope->GetOwnPropertyNames()->Length(); + #endif + + if(propertyNameLength > 0) + { + this->CommitType(typeLocation, BSON_TYPE_CODE_W_SCOPE); + void* codeWidthScopeSize = this->BeginWriteSize(); + this->WriteLengthPrefixedString(function->ToString()); + SerializeDocument(scope); + this->CommitSize(codeWidthScopeSize); + } + else + { + this->CommitType(typeLocation, BSON_TYPE_CODE); + this->WriteLengthPrefixedString(function->ToString()); + } + } + else if(bson->dbrefString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_OBJECT); + + void* dbRefSize = this->BeginWriteSize(); + + void* refType = this->BeginWriteType(); + this->WriteData("$ref", 5); + SerializeValue(refType, object->Get(bson->_dbRefNamespaceString)); + + void* idType = this->BeginWriteType(); + this->WriteData("$id", 4); + SerializeValue(idType, object->Get(bson->_dbRefOidString)); + + const Local& refDbValue = object->Get(bson->_dbRefDbString); + if(!refDbValue->IsUndefined()) + { + void* dbType = this->BeginWriteType(); + this->WriteData("$db", 4); + SerializeValue(dbType, refDbValue); + } + + this->WriteByte(0); + this->CommitSize(dbRefSize); + } + else if(bson->minKeyString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_MIN_KEY); + } + else if(bson->maxKeyString->StrictEquals(constructorString)) + { + this->CommitType(typeLocation, BSON_TYPE_MAX_KEY); + } + } + else if(Buffer::HasInstance(value)) + { + this->CommitType(typeLocation, BSON_TYPE_BINARY); + + #if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 + Buffer *buffer = ObjectWrap::Unwrap(value->ToObject()); + uint32_t length = object->length(); + #else + uint32_t length = Buffer::Length(value->ToObject()); + #endif + + this->WriteInt32(length); + this->WriteByte(0); + this->WriteData(Buffer::Data(value->ToObject()), length); + } + else + { + this->CommitType(typeLocation, BSON_TYPE_OBJECT); + SerializeDocument(value); + } + } + else if(value->IsNull() || value->IsUndefined()) + { + this->CommitType(typeLocation, BSON_TYPE_NULL); + } +} + +// Data points to start of element list, length is length of entire document including '\0' but excluding initial size +BSONDeserializer::BSONDeserializer(BSON* aBson, char* data, size_t length) +: bson(aBson), + pStart(data), + p(data), + pEnd(data + length - 1) +{ + if(*pEnd != '\0') ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); +} + +BSONDeserializer::BSONDeserializer(BSONDeserializer& parentSerializer, size_t length) +: bson(parentSerializer.bson), + pStart(parentSerializer.p), + p(parentSerializer.p), + pEnd(parentSerializer.p + length - 1) +{ + parentSerializer.p += length; + if(pEnd > parentSerializer.pEnd) ThrowAllocatedStringException(64, "Child document exceeds parent's bounds"); + if(*pEnd != '\0') ThrowAllocatedStringException(64, "Missing end of document marker '\\0'"); +} + +Local BSONDeserializer::ReadCString() +{ + char* start = p; + while(*p++) { } + return String::New(start, (int32_t) (p-start-1) ); +} + +int32_t BSONDeserializer::ReadRegexOptions() +{ + int32_t options = 0; + for(;;) + { + switch(*p++) + { + case '\0': return options; + case 's': options |= RegExp::kGlobal; break; + case 'i': options |= RegExp::kIgnoreCase; break; + case 'm': options |= RegExp::kMultiline; break; + } + } +} + +uint32_t BSONDeserializer::ReadIntegerString() +{ + uint32_t value = 0; + while(*p) + { + if(*p 
< '0' || *p > '9') ThrowAllocatedStringException(64, "Invalid key for array"); + value = value * 10 + *p++ - '0'; + } + ++p; + return value; +} + +Local BSONDeserializer::ReadString() +{ + uint32_t length = ReadUInt32(); + char* start = p; + p += length; + return String::New(start, length-1); +} + +Local BSONDeserializer::ReadObjectId() +{ + uint16_t objectId[12]; + for(size_t i = 0; i < 12; ++i) + { + objectId[i] = *reinterpret_cast(p++); + } + return String::New(objectId, 12); +} + +Handle BSONDeserializer::DeserializeDocument(bool promoteLongs) +{ + uint32_t length = ReadUInt32(); + if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Document is less than 5 bytes"); + + BSONDeserializer documentDeserializer(*this, length-4); + return documentDeserializer.DeserializeDocumentInternal(promoteLongs); +} + +Handle BSONDeserializer::DeserializeDocumentInternal(bool promoteLongs) +{ + Local returnObject = Object::New(); + + while(HasMoreData()) + { + BsonType type = (BsonType) ReadByte(); + const Local& name = ReadCString(); + const Handle& value = DeserializeValue(type, promoteLongs); + returnObject->ForceSet(name, value); + } + if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Document: Serialize consumed unexpected number of bytes"); + + // From JavaScript: + // if(object['$id'] != null) object = new DBRef(object['$ref'], object['$id'], object['$db']); + if(returnObject->Has(bson->_dbRefIdRefString)) + { + Local argv[] = { returnObject->Get(bson->_dbRefRefString), returnObject->Get(bson->_dbRefIdRefString), returnObject->Get(bson->_dbRefDbRefString) }; + return bson->dbrefConstructor->NewInstance(3, argv); + } + else + { + return returnObject; + } +} + +Handle BSONDeserializer::DeserializeArray(bool promoteLongs) +{ + uint32_t length = ReadUInt32(); + if(length < 5) ThrowAllocatedStringException(64, "Bad BSON: Array Document is less than 5 bytes"); + + BSONDeserializer documentDeserializer(*this, length-4); + return documentDeserializer.DeserializeArrayInternal(promoteLongs); +} + +Handle BSONDeserializer::DeserializeArrayInternal(bool promoteLongs) +{ + Local returnArray = Array::New(); + + while(HasMoreData()) + { + BsonType type = (BsonType) ReadByte(); + uint32_t index = ReadIntegerString(); + const Handle& value = DeserializeValue(type, promoteLongs); + returnArray->Set(index, value); + } + if(p != pEnd) ThrowAllocatedStringException(64, "Bad BSON Array: Serialize consumed unexpected number of bytes"); + + return returnArray; +} + +Handle BSONDeserializer::DeserializeValue(BsonType type, bool promoteLongs) +{ + switch(type) + { + case BSON_TYPE_STRING: + return ReadString(); + + case BSON_TYPE_INT: + return Integer::New(ReadInt32()); + + case BSON_TYPE_NUMBER: + return Number::New(ReadDouble()); + + case BSON_TYPE_NULL: + return Null(); + + case BSON_TYPE_UNDEFINED: + return Undefined(); + + case BSON_TYPE_TIMESTAMP: + { + int32_t lowBits = ReadInt32(); + int32_t highBits = ReadInt32(); + Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; + return bson->timestampConstructor->NewInstance(2, argv); + } + + case BSON_TYPE_BOOLEAN: + return (ReadByte() != 0) ? 
True() : False(); + + case BSON_TYPE_REGEXP: + { + const Local& regex = ReadCString(); + int32_t options = ReadRegexOptions(); + return RegExp::New(regex, (RegExp::Flags) options); + } + + case BSON_TYPE_CODE: + { + const Local& code = ReadString(); + const Local& scope = Object::New(); + Local argv[] = { code, scope }; + return bson->codeConstructor->NewInstance(2, argv); + } + + case BSON_TYPE_CODE_W_SCOPE: + { + ReadUInt32(); + const Local& code = ReadString(); + const Handle& scope = DeserializeDocument(promoteLongs); + Local argv[] = { code, scope->ToObject() }; + return bson->codeConstructor->NewInstance(2, argv); + } + + case BSON_TYPE_OID: + { + Local argv[] = { ReadObjectId() }; + return bson->objectIDConstructor->NewInstance(1, argv); + } + + case BSON_TYPE_BINARY: + { + uint32_t length = ReadUInt32(); + uint32_t subType = ReadByte(); + if(subType == 0x02) { + length = ReadInt32(); + } + + Buffer* buffer = Buffer::New(p, length); + p += length; + + Handle argv[] = { buffer->handle_, Uint32::New(subType) }; + return bson->binaryConstructor->NewInstance(2, argv); + } + + case BSON_TYPE_LONG: + { + // Read 32 bit integers + int32_t lowBits = (int32_t) ReadInt32(); + int32_t highBits = (int32_t) ReadInt32(); + + // Promote long is enabled + if(promoteLongs) { + // If value is < 2^53 and >-2^53 + if((highBits < 0x200000 || (highBits == 0x200000 && lowBits == 0)) && highBits >= -0x200000) { + // Adjust the pointer and read as 64 bit value + p -= 8; + // Read the 64 bit value + int64_t finalValue = (int64_t) ReadInt64(); + return Number::New(finalValue); + } + } + + // Decode the Long value + Local argv[] = { Int32::New(lowBits), Int32::New(highBits) }; + return bson->longConstructor->NewInstance(2, argv); + } + + case BSON_TYPE_DATE: + return Date::New((double) ReadInt64()); + + case BSON_TYPE_ARRAY: + return DeserializeArray(promoteLongs); + + case BSON_TYPE_OBJECT: + return DeserializeDocument(promoteLongs); + + case BSON_TYPE_SYMBOL: + { + const Local& string = ReadString(); + Local argv[] = { string }; + return bson->symbolConstructor->NewInstance(1, argv); + } + + case BSON_TYPE_MIN_KEY: + return bson->minKeyConstructor->NewInstance(); + + case BSON_TYPE_MAX_KEY: + return bson->maxKeyConstructor->NewInstance(); + + default: + ThrowAllocatedStringException(64, "Unhandled BSON Type: %d", type); + } + + return v8::Null(); +} + + +static Handle VException(const char *msg) +{ + HandleScope scope; + return ThrowException(Exception::Error(String::New(msg))); +} + +Persistent BSON::constructor_template; + +BSON::BSON() : ObjectWrap() +{ + // Setup pre-allocated comparision objects + _bsontypeString = Persistent::New(String::New("_bsontype")); + _longLowString = Persistent::New(String::New("low_")); + _longHighString = Persistent::New(String::New("high_")); + _objectIDidString = Persistent::New(String::New("id")); + _binaryPositionString = Persistent::New(String::New("position")); + _binarySubTypeString = Persistent::New(String::New("sub_type")); + _binaryBufferString = Persistent::New(String::New("buffer")); + _doubleValueString = Persistent::New(String::New("value")); + _symbolValueString = Persistent::New(String::New("value")); + _dbRefRefString = Persistent::New(String::New("$ref")); + _dbRefIdRefString = Persistent::New(String::New("$id")); + _dbRefDbRefString = Persistent::New(String::New("$db")); + _dbRefNamespaceString = Persistent::New(String::New("namespace")); + _dbRefDbString = Persistent::New(String::New("db")); + _dbRefOidString = Persistent::New(String::New("oid")); + 
_codeCodeString = Persistent::New(String::New("code")); + _codeScopeString = Persistent::New(String::New("scope")); + _toBSONString = Persistent::New(String::New("toBSON")); + + longString = Persistent::New(String::New("Long")); + objectIDString = Persistent::New(String::New("ObjectID")); + binaryString = Persistent::New(String::New("Binary")); + codeString = Persistent::New(String::New("Code")); + dbrefString = Persistent::New(String::New("DBRef")); + symbolString = Persistent::New(String::New("Symbol")); + doubleString = Persistent::New(String::New("Double")); + timestampString = Persistent::New(String::New("Timestamp")); + minKeyString = Persistent::New(String::New("MinKey")); + maxKeyString = Persistent::New(String::New("MaxKey")); +} + +void BSON::Initialize(v8::Handle target) +{ + // Grab the scope of the call from Node + HandleScope scope; + // Define a new function template + Local t = FunctionTemplate::New(New); + constructor_template = Persistent::New(t); + constructor_template->InstanceTemplate()->SetInternalFieldCount(1); + constructor_template->SetClassName(String::NewSymbol("BSON")); + + // Instance methods + NODE_SET_PROTOTYPE_METHOD(constructor_template, "calculateObjectSize", CalculateObjectSize); + NODE_SET_PROTOTYPE_METHOD(constructor_template, "serialize", BSONSerialize); + NODE_SET_PROTOTYPE_METHOD(constructor_template, "serializeWithBufferAndIndex", SerializeWithBufferAndIndex); + NODE_SET_PROTOTYPE_METHOD(constructor_template, "deserialize", BSONDeserialize); + NODE_SET_PROTOTYPE_METHOD(constructor_template, "deserializeStream", BSONDeserializeStream); + + target->ForceSet(String::NewSymbol("BSON"), constructor_template->GetFunction()); +} + +// Create a new instance of BSON and passing it the existing context +Handle BSON::New(const Arguments &args) +{ + HandleScope scope; + + // Check that we have an array + if(args.Length() == 1 && args[0]->IsArray()) + { + // Cast the array to a local reference + Local array = Local::Cast(args[0]); + + if(array->Length() > 0) + { + // Create a bson object instance and return it + BSON *bson = new BSON(); + + uint32_t foundClassesMask = 0; + + // Iterate over all entries to save the instantiate funtions + for(uint32_t i = 0; i < array->Length(); i++) { + // Let's get a reference to the function + Local func = Local::Cast(array->Get(i)); + Local functionName = func->GetName()->ToString(); + + // Save the functions making them persistant handles (they don't get collected) + if(functionName->StrictEquals(bson->longString)) { + bson->longConstructor = Persistent::New(func); + foundClassesMask |= 1; + } else if(functionName->StrictEquals(bson->objectIDString)) { + bson->objectIDConstructor = Persistent::New(func); + foundClassesMask |= 2; + } else if(functionName->StrictEquals(bson->binaryString)) { + bson->binaryConstructor = Persistent::New(func); + foundClassesMask |= 4; + } else if(functionName->StrictEquals(bson->codeString)) { + bson->codeConstructor = Persistent::New(func); + foundClassesMask |= 8; + } else if(functionName->StrictEquals(bson->dbrefString)) { + bson->dbrefConstructor = Persistent::New(func); + foundClassesMask |= 0x10; + } else if(functionName->StrictEquals(bson->symbolString)) { + bson->symbolConstructor = Persistent::New(func); + foundClassesMask |= 0x20; + } else if(functionName->StrictEquals(bson->doubleString)) { + bson->doubleConstructor = Persistent::New(func); + foundClassesMask |= 0x40; + } else if(functionName->StrictEquals(bson->timestampString)) { + bson->timestampConstructor = Persistent::New(func); 
+ foundClassesMask |= 0x80; + } else if(functionName->StrictEquals(bson->minKeyString)) { + bson->minKeyConstructor = Persistent::New(func); + foundClassesMask |= 0x100; + } else if(functionName->StrictEquals(bson->maxKeyString)) { + bson->maxKeyConstructor = Persistent::New(func); + foundClassesMask |= 0x200; + } + } + + // Check if we have the right number of constructors otherwise throw an error + if(foundClassesMask != 0x3ff) { + delete bson; + return VException("Missing function constructor for either [Long/ObjectID/Binary/Code/DbRef/Symbol/Double/Timestamp/MinKey/MaxKey]"); + } else { + bson->Wrap(args.This()); + return args.This(); + } + } + else + { + return VException("No types passed in"); + } + } + else + { + return VException("Argument passed in must be an array of types"); + } +} + +//------------------------------------------------------------------------------------------------ +//------------------------------------------------------------------------------------------------ +//------------------------------------------------------------------------------------------------ +//------------------------------------------------------------------------------------------------ + +Handle BSON::BSONDeserialize(const Arguments &args) +{ + HandleScope scope; + + // Fail if the first argument is not a string or a buffer + if(args.Length() > 1 && !args[0]->IsString() && !Buffer::HasInstance(args[0])) + return VException("First Argument must be a Buffer or String."); + + // Promote longs + bool promoteLongs = true; + + // If we have an options object + if(args.Length() == 2 && args[1]->IsObject()) { + Local options = args[1]->ToObject(); + + if(options->Has(String::New("promoteLongs"))) { + promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); + } + } + + // Define pointer to data + Local obj = args[0]->ToObject(); + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + + // If we passed in a buffer, let's unpack it, otherwise let's unpack the string + if(Buffer::HasInstance(obj)) + { +#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 + Buffer *buffer = ObjectWrap::Unwrap(obj); + char* data = buffer->data(); + size_t length = buffer->length(); +#else + char* data = Buffer::Data(obj); + size_t length = Buffer::Length(obj); +#endif + + // Validate that we have at least 5 bytes + if(length < 5) return VException("corrupt bson message < 5 bytes long"); + + try + { + BSONDeserializer deserializer(bson, data, length); + // deserializer.promoteLongs = promoteLongs; + return deserializer.DeserializeDocument(promoteLongs); + } + catch(char* exception) + { + Handle error = VException(exception); + free(exception); + return error; + } + + } + else + { + // The length of the data for this encoding + ssize_t len = DecodeBytes(args[0], BINARY); + + // Validate that we have at least 5 bytes + if(len < 5) return VException("corrupt bson message < 5 bytes long"); + + // Let's define the buffer size + char* data = (char *)malloc(len); + DecodeWrite(data, len, args[0], BINARY); + + try + { + BSONDeserializer deserializer(bson, data, len); + // deserializer.promoteLongs = promoteLongs; + Handle result = deserializer.DeserializeDocument(promoteLongs); + free(data); + return result; + + } + catch(char* exception) + { + Handle error = VException(exception); + free(exception); + free(data); + return error; + } + } +} + +Local BSON::GetSerializeObject(const Handle& argValue) +{ + Local object = argValue->ToObject(); + if(object->Has(_toBSONString)) 
+ { + const Local& toBSON = object->Get(_toBSONString); + if(!toBSON->IsFunction()) ThrowAllocatedStringException(64, "toBSON is not a function"); + + Local result = Local::Cast(toBSON)->Call(object, 0, NULL); + if(!result->IsObject()) ThrowAllocatedStringException(64, "toBSON function did not return an object"); + return result->ToObject(); + } + else + { + return object; + } +} + +Handle BSON::BSONSerialize(const Arguments &args) +{ + HandleScope scope; + + if(args.Length() == 1 && !args[0]->IsObject()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); + if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); + if(args.Length() == 3 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean]"); + if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !args[2]->IsBoolean() && !args[3]->IsBoolean()) return VException("One, two or tree arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); + if(args.Length() > 4) return VException("One, two, tree or four arguments required - [object] or [object, boolean] or [object, boolean, boolean] or [object, boolean, boolean, boolean]"); + + // Check if we have an array as the object + if(args[0]->IsArray()) return VException("Only javascript objects supported"); + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + + // Calculate the total size of the document in binary form to ensure we only allocate memory once + // With serialize function + bool serializeFunctions = (args.Length() >= 4) && args[3]->BooleanValue(); + + char *serialized_object = NULL; + size_t object_size; + try + { + Local object = bson->GetSerializeObject(args[0]); + + BSONSerializer counter(bson, false, serializeFunctions); + counter.SerializeDocument(object); + object_size = counter.GetSerializeSize(); + + // Allocate the memory needed for the serialization + serialized_object = (char *)malloc(object_size); + + // Check if we have a boolean value + bool checkKeys = args.Length() >= 3 && args[1]->IsBoolean() && args[1]->BooleanValue(); + BSONSerializer data(bson, checkKeys, serializeFunctions, serialized_object); + data.SerializeDocument(object); + } + catch(char *err_msg) + { + free(serialized_object); + Handle error = VException(err_msg); + free(err_msg); + return error; + } + + // If we have 3 arguments + if(args.Length() == 3 || args.Length() == 4) + { + Buffer *buffer = Buffer::New(serialized_object, object_size); + free(serialized_object); + return scope.Close(buffer->handle_); + } + else + { + Local bin_value = Encode(serialized_object, object_size, BINARY)->ToString(); + free(serialized_object); + return bin_value; + } +} + +Handle BSON::CalculateObjectSize(const Arguments &args) +{ + HandleScope scope; + // Ensure we have a valid object + if(args.Length() == 1 && !args[0]->IsObject()) return VException("One argument required - [object]"); + if(args.Length() == 2 && !args[0]->IsObject() && !args[1]->IsBoolean()) return VException("Two arguments required - [object, boolean]"); + if(args.Length() > 3) return VException("One or two arguments required - [object] or [object, boolean]"); + + // Unpack the BSON parser instance + 
BSON *bson = ObjectWrap::Unwrap(args.This()); + bool serializeFunctions = (args.Length() >= 2) && args[1]->BooleanValue(); + BSONSerializer countSerializer(bson, false, serializeFunctions); + countSerializer.SerializeDocument(args[0]); + + // Return the object size + return scope.Close(Uint32::New((uint32_t) countSerializer.GetSerializeSize())); +} + +Handle BSON::SerializeWithBufferAndIndex(const Arguments &args) +{ + HandleScope scope; + + //BSON.serializeWithBufferAndIndex = function serializeWithBufferAndIndex(object, ->, buffer, index) { + // Ensure we have the correct values + if(args.Length() > 5) return VException("Four or five parameters required [object, boolean, Buffer, int] or [object, boolean, Buffer, int, boolean]"); + if(args.Length() == 4 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32()) return VException("Four parameters required [object, boolean, Buffer, int]"); + if(args.Length() == 5 && !args[0]->IsObject() && !args[1]->IsBoolean() && !Buffer::HasInstance(args[2]) && !args[3]->IsUint32() && !args[4]->IsBoolean()) return VException("Four parameters required [object, boolean, Buffer, int, boolean]"); + + uint32_t index; + size_t object_size; + + try + { + BSON *bson = ObjectWrap::Unwrap(args.This()); + + Local obj = args[2]->ToObject(); + char* data = Buffer::Data(obj); + size_t length = Buffer::Length(obj); + + index = args[3]->Uint32Value(); + bool checkKeys = args.Length() >= 4 && args[1]->IsBoolean() && args[1]->BooleanValue(); + bool serializeFunctions = (args.Length() == 5) && args[4]->BooleanValue(); + + BSONSerializer dataSerializer(bson, checkKeys, serializeFunctions, data+index); + dataSerializer.SerializeDocument(bson->GetSerializeObject(args[0])); + object_size = dataSerializer.GetSerializeSize(); + + if(object_size + index > length) return VException("Serious error - overflowed buffer!!"); + } + catch(char *exception) + { + Handle error = VException(exception); + free(exception); + return error; + } + + return scope.Close(Uint32::New((uint32_t) (index + object_size - 1))); +} + +Handle BSON::BSONDeserializeStream(const Arguments &args) +{ + HandleScope scope; + + // At least 3 arguments required + if(args.Length() < 5) return VException("Arguments required (Buffer(data), Number(index in data), Number(number of documents to deserialize), Array(results), Number(index in the array), Object(optional))"); + + // If the number of argumets equals 3 + if(args.Length() >= 5) + { + if(!Buffer::HasInstance(args[0])) return VException("First argument must be Buffer instance"); + if(!args[1]->IsUint32()) return VException("Second argument must be a positive index number"); + if(!args[2]->IsUint32()) return VException("Third argument must be a positive number of documents to deserialize"); + if(!args[3]->IsArray()) return VException("Fourth argument must be an array the size of documents to deserialize"); + if(!args[4]->IsUint32()) return VException("Sixth argument must be a positive index number"); + } + + // If we have 4 arguments + if(args.Length() == 6 && !args[5]->IsObject()) return VException("Fifth argument must be an object with options"); + + // Define pointer to data + Local obj = args[0]->ToObject(); + uint32_t numberOfDocuments = args[2]->Uint32Value(); + uint32_t index = args[1]->Uint32Value(); + uint32_t resultIndex = args[4]->Uint32Value(); + bool promoteLongs = true; + + // Check for the value promoteLongs in the options object + if(args.Length() == 6) { + Local options = args[5]->ToObject(); + + // 
Check if we have the promoteLong variable + if(options->Has(String::New("promoteLongs"))) { + promoteLongs = options->Get(String::New("promoteLongs"))->ToBoolean()->Value(); + } + } + + // Unpack the BSON parser instance + BSON *bson = ObjectWrap::Unwrap(args.This()); + + // Unpack the buffer variable +#if NODE_MAJOR_VERSION == 0 && NODE_MINOR_VERSION < 3 + Buffer *buffer = ObjectWrap::Unwrap(obj); + char* data = buffer->data(); + size_t length = buffer->length(); +#else + char* data = Buffer::Data(obj); + size_t length = Buffer::Length(obj); +#endif + + // Fetch the documents + Local documents = args[3]->ToObject(); + + BSONDeserializer deserializer(bson, data+index, length-index); + for(uint32_t i = 0; i < numberOfDocuments; i++) + { + try + { + documents->Set(i + resultIndex, deserializer.DeserializeDocument(promoteLongs)); + } + catch (char* exception) + { + Handle error = VException(exception); + free(exception); + return error; + } + } + + // Return new index of parsing + return scope.Close(Uint32::New((uint32_t) (index + deserializer.GetSerializeSize()))); +} + +// Exporting function +extern "C" void init(Handle target) +{ + HandleScope scope; + BSON::Initialize(target); +} + +NODE_MODULE(bson, BSON::Initialize); diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/bson.h b/node_modules/mquery/node_modules/bson/ext/bson.h similarity index 97% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/bson.h rename to node_modules/mquery/node_modules/bson/ext/bson.h index 3638f8269..a152e2eef 100644 --- a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/bson.h +++ b/node_modules/mquery/node_modules/bson/ext/bson.h @@ -1,277 +1,277 @@ -//=========================================================================== - -#ifndef BSON_H_ -#define BSON_H_ - -//=========================================================================== - -#ifdef __arm__ -#define USE_MISALIGNED_MEMORY_ACCESS 0 -#else -#define USE_MISALIGNED_MEMORY_ACCESS 1 -#endif - -#include -#include -#include - -using namespace v8; -using namespace node; - -//=========================================================================== - -enum BsonType -{ - BSON_TYPE_NUMBER = 1, - BSON_TYPE_STRING = 2, - BSON_TYPE_OBJECT = 3, - BSON_TYPE_ARRAY = 4, - BSON_TYPE_BINARY = 5, - BSON_TYPE_UNDEFINED = 6, - BSON_TYPE_OID = 7, - BSON_TYPE_BOOLEAN = 8, - BSON_TYPE_DATE = 9, - BSON_TYPE_NULL = 10, - BSON_TYPE_REGEXP = 11, - BSON_TYPE_CODE = 13, - BSON_TYPE_SYMBOL = 14, - BSON_TYPE_CODE_W_SCOPE = 15, - BSON_TYPE_INT = 16, - BSON_TYPE_TIMESTAMP = 17, - BSON_TYPE_LONG = 18, - BSON_TYPE_MAX_KEY = 0x7f, - BSON_TYPE_MIN_KEY = 0xff -}; - -//=========================================================================== - -template class BSONSerializer; - -class BSON : public ObjectWrap { -public: - BSON(); - ~BSON() {} - - static void Initialize(Handle target); - static Handle BSONDeserializeStream(const Arguments &args); - - // JS based objects - static Handle BSONSerialize(const Arguments &args); - static Handle BSONDeserialize(const Arguments &args); - - // Calculate size of function - static Handle CalculateObjectSize(const Arguments &args); - static Handle SerializeWithBufferAndIndex(const Arguments &args); - - // Constructor used for creating new BSON objects from C++ - static Persistent constructor_template; - -private: - static Handle New(const Arguments &args); - static Handle deserialize(BSON *bson, char *data, uint32_t 
dataLength, uint32_t startIndex, bool is_array_item); - - // BSON type instantiate functions - Persistent longConstructor; - Persistent objectIDConstructor; - Persistent binaryConstructor; - Persistent codeConstructor; - Persistent dbrefConstructor; - Persistent symbolConstructor; - Persistent doubleConstructor; - Persistent timestampConstructor; - Persistent minKeyConstructor; - Persistent maxKeyConstructor; - - // Equality Objects - Persistent longString; - Persistent objectIDString; - Persistent binaryString; - Persistent codeString; - Persistent dbrefString; - Persistent symbolString; - Persistent doubleString; - Persistent timestampString; - Persistent minKeyString; - Persistent maxKeyString; - - // Equality speed up comparison objects - Persistent _bsontypeString; - Persistent _longLowString; - Persistent _longHighString; - Persistent _objectIDidString; - Persistent _binaryPositionString; - Persistent _binarySubTypeString; - Persistent _binaryBufferString; - Persistent _doubleValueString; - Persistent _symbolValueString; - - Persistent _dbRefRefString; - Persistent _dbRefIdRefString; - Persistent _dbRefDbRefString; - Persistent _dbRefNamespaceString; - Persistent _dbRefDbString; - Persistent _dbRefOidString; - - Persistent _codeCodeString; - Persistent _codeScopeString; - Persistent _toBSONString; - - Local GetSerializeObject(const Handle& object); - - template friend class BSONSerializer; - friend class BSONDeserializer; -}; - -//=========================================================================== - -class CountStream -{ -public: - CountStream() : count(0) { } - - void WriteByte(int value) { ++count; } - void WriteByte(const Handle&, const Handle&) { ++count; } - void WriteBool(const Handle& value) { ++count; } - void WriteInt32(int32_t value) { count += 4; } - void WriteInt32(const Handle& value) { count += 4; } - void WriteInt32(const Handle& object, const Handle& key) { count += 4; } - void WriteInt64(int64_t value) { count += 8; } - void WriteInt64(const Handle& value) { count += 8; } - void WriteDouble(double value) { count += 8; } - void WriteDouble(const Handle& value) { count += 8; } - void WriteDouble(const Handle&, const Handle&) { count += 8; } - void WriteUInt32String(uint32_t name) { char buffer[32]; count += sprintf(buffer, "%u", name) + 1; } - void WriteLengthPrefixedString(const Local& value) { count += value->Utf8Length()+5; } - void WriteObjectId(const Handle& object, const Handle& key) { count += 12; } - void WriteString(const Local& value) { count += value->Utf8Length() + 1; } // This returns the number of bytes exclusive of the NULL terminator - void WriteData(const char* data, size_t length) { count += length; } - - void* BeginWriteType() { ++count; return NULL; } - void CommitType(void*, BsonType) { } - void* BeginWriteSize() { count += 4; return NULL; } - void CommitSize(void*) { } - - size_t GetSerializeSize() const { return count; } - - // Do nothing. 
CheckKey is implemented for DataStream - void CheckKey(const Local&) { } - -private: - size_t count; -}; - -class DataStream -{ -public: - DataStream(char* aDestinationBuffer) : destinationBuffer(aDestinationBuffer), p(aDestinationBuffer) { } - - void WriteByte(int value) { *p++ = value; } - void WriteByte(const Handle& object, const Handle& key) { *p++ = object->Get(key)->Int32Value(); } -#if USE_MISALIGNED_MEMORY_ACCESS - void WriteInt32(int32_t value) { *reinterpret_cast(p) = value; p += 4; } - void WriteInt64(int64_t value) { *reinterpret_cast(p) = value; p += 8; } - void WriteDouble(double value) { *reinterpret_cast(p) = value; p += 8; } -#else - void WriteInt32(int32_t value) { memcpy(p, &value, 4); p += 4; } - void WriteInt64(int64_t value) { memcpy(p, &value, 8); p += 8; } - void WriteDouble(double value) { memcpy(p, &value, 8); p += 8; } -#endif - void WriteBool(const Handle& value) { WriteByte(value->BooleanValue() ? 1 : 0); } - void WriteInt32(const Handle& value) { WriteInt32(value->Int32Value()); } - void WriteInt32(const Handle& object, const Handle& key) { WriteInt32(object->Get(key)); } - void WriteInt64(const Handle& value) { WriteInt64(value->IntegerValue()); } - void WriteDouble(const Handle& value) { WriteDouble(value->NumberValue()); } - void WriteDouble(const Handle& object, const Handle& key) { WriteDouble(object->Get(key)); } - void WriteUInt32String(uint32_t name) { p += sprintf(p, "%u", name) + 1; } - void WriteLengthPrefixedString(const Local& value) { WriteInt32(value->Utf8Length()+1); WriteString(value); } - void WriteObjectId(const Handle& object, const Handle& key); - void WriteString(const Local& value) { p += value->WriteUtf8(p); } // This returns the number of bytes inclusive of the NULL terminator. - void WriteData(const char* data, size_t length) { memcpy(p, data, length); p += length; } - - void* BeginWriteType() { void* returnValue = p; p++; return returnValue; } - void CommitType(void* beginPoint, BsonType value) { *reinterpret_cast(beginPoint) = value; } - void* BeginWriteSize() { void* returnValue = p; p += 4; return returnValue; } - -#if USE_MISALIGNED_MEMORY_ACCESS - void CommitSize(void* beginPoint) { *reinterpret_cast(beginPoint) = (int32_t) (p - (char*) beginPoint); } -#else - void CommitSize(void* beginPoint) { int32_t value = (int32_t) (p - (char*) beginPoint); memcpy(beginPoint, &value, 4); } -#endif - - size_t GetSerializeSize() const { return p - destinationBuffer; } - - void CheckKey(const Local& keyName); - -protected: - char *const destinationBuffer; // base, never changes - char* p; // cursor into buffer -}; - -template class BSONSerializer : public T -{ -private: - typedef T Inherited; - -public: - BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions) : Inherited(), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { } - BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions, char* parentParam) : Inherited(parentParam), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { } - - void SerializeDocument(const Handle& value); - void SerializeArray(const Handle& value); - void SerializeValue(void* typeLocation, const Handle& value); - -private: - bool checkKeys; - bool serializeFunctions; - BSON* bson; -}; - -//=========================================================================== - -class BSONDeserializer -{ -public: - BSONDeserializer(BSON* aBson, char* data, size_t length); - BSONDeserializer(BSONDeserializer& parentSerializer, size_t length); - - 
Handle DeserializeDocument(bool promoteLongs); - - bool HasMoreData() const { return p < pEnd; } - Local ReadCString(); - uint32_t ReadIntegerString(); - int32_t ReadRegexOptions(); - Local ReadString(); - Local ReadObjectId(); - - unsigned char ReadByte() { return *reinterpret_cast(p++); } -#if USE_MISALIGNED_MEMORY_ACCESS - int32_t ReadInt32() { int32_t returnValue = *reinterpret_cast(p); p += 4; return returnValue; } - uint32_t ReadUInt32() { uint32_t returnValue = *reinterpret_cast(p); p += 4; return returnValue; } - int64_t ReadInt64() { int64_t returnValue = *reinterpret_cast(p); p += 8; return returnValue; } - double ReadDouble() { double returnValue = *reinterpret_cast(p); p += 8; return returnValue; } -#else - int32_t ReadInt32() { int32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; } - uint32_t ReadUInt32() { uint32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; } - int64_t ReadInt64() { int64_t returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; } - double ReadDouble() { double returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; } -#endif - - size_t GetSerializeSize() const { return p - pStart; } - -private: - Handle DeserializeArray(bool promoteLongs); - Handle DeserializeValue(BsonType type, bool promoteLongs); - Handle DeserializeDocumentInternal(bool promoteLongs); - Handle DeserializeArrayInternal(bool promoteLongs); - - BSON* bson; - char* const pStart; - char* p; - char* const pEnd; -}; - -//=========================================================================== - -#endif // BSON_H_ - -//=========================================================================== +//=========================================================================== + +#ifndef BSON_H_ +#define BSON_H_ + +//=========================================================================== + +#ifdef __arm__ +#define USE_MISALIGNED_MEMORY_ACCESS 0 +#else +#define USE_MISALIGNED_MEMORY_ACCESS 1 +#endif + +#include +#include +#include + +using namespace v8; +using namespace node; + +//=========================================================================== + +enum BsonType +{ + BSON_TYPE_NUMBER = 1, + BSON_TYPE_STRING = 2, + BSON_TYPE_OBJECT = 3, + BSON_TYPE_ARRAY = 4, + BSON_TYPE_BINARY = 5, + BSON_TYPE_UNDEFINED = 6, + BSON_TYPE_OID = 7, + BSON_TYPE_BOOLEAN = 8, + BSON_TYPE_DATE = 9, + BSON_TYPE_NULL = 10, + BSON_TYPE_REGEXP = 11, + BSON_TYPE_CODE = 13, + BSON_TYPE_SYMBOL = 14, + BSON_TYPE_CODE_W_SCOPE = 15, + BSON_TYPE_INT = 16, + BSON_TYPE_TIMESTAMP = 17, + BSON_TYPE_LONG = 18, + BSON_TYPE_MAX_KEY = 0x7f, + BSON_TYPE_MIN_KEY = 0xff +}; + +//=========================================================================== + +template class BSONSerializer; + +class BSON : public ObjectWrap { +public: + BSON(); + ~BSON() {} + + static void Initialize(Handle target); + static Handle BSONDeserializeStream(const Arguments &args); + + // JS based objects + static Handle BSONSerialize(const Arguments &args); + static Handle BSONDeserialize(const Arguments &args); + + // Calculate size of function + static Handle CalculateObjectSize(const Arguments &args); + static Handle SerializeWithBufferAndIndex(const Arguments &args); + + // Constructor used for creating new BSON objects from C++ + static Persistent constructor_template; + +private: + static Handle New(const Arguments &args); + static Handle deserialize(BSON *bson, char *data, uint32_t dataLength, uint32_t startIndex, bool is_array_item); + + // BSON type instantiate functions 
+ Persistent longConstructor; + Persistent objectIDConstructor; + Persistent binaryConstructor; + Persistent codeConstructor; + Persistent dbrefConstructor; + Persistent symbolConstructor; + Persistent doubleConstructor; + Persistent timestampConstructor; + Persistent minKeyConstructor; + Persistent maxKeyConstructor; + + // Equality Objects + Persistent longString; + Persistent objectIDString; + Persistent binaryString; + Persistent codeString; + Persistent dbrefString; + Persistent symbolString; + Persistent doubleString; + Persistent timestampString; + Persistent minKeyString; + Persistent maxKeyString; + + // Equality speed up comparison objects + Persistent _bsontypeString; + Persistent _longLowString; + Persistent _longHighString; + Persistent _objectIDidString; + Persistent _binaryPositionString; + Persistent _binarySubTypeString; + Persistent _binaryBufferString; + Persistent _doubleValueString; + Persistent _symbolValueString; + + Persistent _dbRefRefString; + Persistent _dbRefIdRefString; + Persistent _dbRefDbRefString; + Persistent _dbRefNamespaceString; + Persistent _dbRefDbString; + Persistent _dbRefOidString; + + Persistent _codeCodeString; + Persistent _codeScopeString; + Persistent _toBSONString; + + Local GetSerializeObject(const Handle& object); + + template friend class BSONSerializer; + friend class BSONDeserializer; +}; + +//=========================================================================== + +class CountStream +{ +public: + CountStream() : count(0) { } + + void WriteByte(int value) { ++count; } + void WriteByte(const Handle&, const Handle&) { ++count; } + void WriteBool(const Handle& value) { ++count; } + void WriteInt32(int32_t value) { count += 4; } + void WriteInt32(const Handle& value) { count += 4; } + void WriteInt32(const Handle& object, const Handle& key) { count += 4; } + void WriteInt64(int64_t value) { count += 8; } + void WriteInt64(const Handle& value) { count += 8; } + void WriteDouble(double value) { count += 8; } + void WriteDouble(const Handle& value) { count += 8; } + void WriteDouble(const Handle&, const Handle&) { count += 8; } + void WriteUInt32String(uint32_t name) { char buffer[32]; count += sprintf(buffer, "%u", name) + 1; } + void WriteLengthPrefixedString(const Local& value) { count += value->Utf8Length()+5; } + void WriteObjectId(const Handle& object, const Handle& key) { count += 12; } + void WriteString(const Local& value) { count += value->Utf8Length() + 1; } // This returns the number of bytes exclusive of the NULL terminator + void WriteData(const char* data, size_t length) { count += length; } + + void* BeginWriteType() { ++count; return NULL; } + void CommitType(void*, BsonType) { } + void* BeginWriteSize() { count += 4; return NULL; } + void CommitSize(void*) { } + + size_t GetSerializeSize() const { return count; } + + // Do nothing. 
CheckKey is implemented for DataStream + void CheckKey(const Local&) { } + +private: + size_t count; +}; + +class DataStream +{ +public: + DataStream(char* aDestinationBuffer) : destinationBuffer(aDestinationBuffer), p(aDestinationBuffer) { } + + void WriteByte(int value) { *p++ = value; } + void WriteByte(const Handle& object, const Handle& key) { *p++ = object->Get(key)->Int32Value(); } +#if USE_MISALIGNED_MEMORY_ACCESS + void WriteInt32(int32_t value) { *reinterpret_cast(p) = value; p += 4; } + void WriteInt64(int64_t value) { *reinterpret_cast(p) = value; p += 8; } + void WriteDouble(double value) { *reinterpret_cast(p) = value; p += 8; } +#else + void WriteInt32(int32_t value) { memcpy(p, &value, 4); p += 4; } + void WriteInt64(int64_t value) { memcpy(p, &value, 8); p += 8; } + void WriteDouble(double value) { memcpy(p, &value, 8); p += 8; } +#endif + void WriteBool(const Handle& value) { WriteByte(value->BooleanValue() ? 1 : 0); } + void WriteInt32(const Handle& value) { WriteInt32(value->Int32Value()); } + void WriteInt32(const Handle& object, const Handle& key) { WriteInt32(object->Get(key)); } + void WriteInt64(const Handle& value) { WriteInt64(value->IntegerValue()); } + void WriteDouble(const Handle& value) { WriteDouble(value->NumberValue()); } + void WriteDouble(const Handle& object, const Handle& key) { WriteDouble(object->Get(key)); } + void WriteUInt32String(uint32_t name) { p += sprintf(p, "%u", name) + 1; } + void WriteLengthPrefixedString(const Local& value) { WriteInt32(value->Utf8Length()+1); WriteString(value); } + void WriteObjectId(const Handle& object, const Handle& key); + void WriteString(const Local& value) { p += value->WriteUtf8(p); } // This returns the number of bytes inclusive of the NULL terminator. + void WriteData(const char* data, size_t length) { memcpy(p, data, length); p += length; } + + void* BeginWriteType() { void* returnValue = p; p++; return returnValue; } + void CommitType(void* beginPoint, BsonType value) { *reinterpret_cast(beginPoint) = value; } + void* BeginWriteSize() { void* returnValue = p; p += 4; return returnValue; } + +#if USE_MISALIGNED_MEMORY_ACCESS + void CommitSize(void* beginPoint) { *reinterpret_cast(beginPoint) = (int32_t) (p - (char*) beginPoint); } +#else + void CommitSize(void* beginPoint) { int32_t value = (int32_t) (p - (char*) beginPoint); memcpy(beginPoint, &value, 4); } +#endif + + size_t GetSerializeSize() const { return p - destinationBuffer; } + + void CheckKey(const Local& keyName); + +protected: + char *const destinationBuffer; // base, never changes + char* p; // cursor into buffer +}; + +template class BSONSerializer : public T +{ +private: + typedef T Inherited; + +public: + BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions) : Inherited(), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { } + BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions, char* parentParam) : Inherited(parentParam), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { } + + void SerializeDocument(const Handle& value); + void SerializeArray(const Handle& value); + void SerializeValue(void* typeLocation, const Handle& value); + +private: + bool checkKeys; + bool serializeFunctions; + BSON* bson; +}; + +//=========================================================================== + +class BSONDeserializer +{ +public: + BSONDeserializer(BSON* aBson, char* data, size_t length); + BSONDeserializer(BSONDeserializer& parentSerializer, size_t length); + + 
Handle DeserializeDocument(bool promoteLongs); + + bool HasMoreData() const { return p < pEnd; } + Local ReadCString(); + uint32_t ReadIntegerString(); + int32_t ReadRegexOptions(); + Local ReadString(); + Local ReadObjectId(); + + unsigned char ReadByte() { return *reinterpret_cast(p++); } +#if USE_MISALIGNED_MEMORY_ACCESS + int32_t ReadInt32() { int32_t returnValue = *reinterpret_cast(p); p += 4; return returnValue; } + uint32_t ReadUInt32() { uint32_t returnValue = *reinterpret_cast(p); p += 4; return returnValue; } + int64_t ReadInt64() { int64_t returnValue = *reinterpret_cast(p); p += 8; return returnValue; } + double ReadDouble() { double returnValue = *reinterpret_cast(p); p += 8; return returnValue; } +#else + int32_t ReadInt32() { int32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; } + uint32_t ReadUInt32() { uint32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; } + int64_t ReadInt64() { int64_t returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; } + double ReadDouble() { double returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; } +#endif + + size_t GetSerializeSize() const { return p - pStart; } + +private: + Handle DeserializeArray(bool promoteLongs); + Handle DeserializeValue(BsonType type, bool promoteLongs); + Handle DeserializeDocumentInternal(bool promoteLongs); + Handle DeserializeArrayInternal(bool promoteLongs); + + BSON* bson; + char* const pStart; + char* p; + char* const pEnd; +}; + +//=========================================================================== + +#endif // BSON_H_ + +//=========================================================================== diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/index.js b/node_modules/mquery/node_modules/bson/ext/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/index.js rename to node_modules/mquery/node_modules/bson/ext/index.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/win32/ia32/bson.node b/node_modules/mquery/node_modules/bson/ext/win32/ia32/bson.node similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/win32/ia32/bson.node rename to node_modules/mquery/node_modules/bson/ext/win32/ia32/bson.node diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/win32/x64/bson.node b/node_modules/mquery/node_modules/bson/ext/win32/x64/bson.node similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/win32/x64/bson.node rename to node_modules/mquery/node_modules/bson/ext/win32/x64/bson.node diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/wscript b/node_modules/mquery/node_modules/bson/ext/wscript similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/ext/wscript rename to node_modules/mquery/node_modules/bson/ext/wscript diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/binary.js b/node_modules/mquery/node_modules/bson/lib/bson/binary.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/binary.js rename to node_modules/mquery/node_modules/bson/lib/bson/binary.js 
diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/binary_parser.js b/node_modules/mquery/node_modules/bson/lib/bson/binary_parser.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/binary_parser.js rename to node_modules/mquery/node_modules/bson/lib/bson/binary_parser.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/bson.js b/node_modules/mquery/node_modules/bson/lib/bson/bson.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/bson.js rename to node_modules/mquery/node_modules/bson/lib/bson/bson.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/code.js b/node_modules/mquery/node_modules/bson/lib/bson/code.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/code.js rename to node_modules/mquery/node_modules/bson/lib/bson/code.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/db_ref.js b/node_modules/mquery/node_modules/bson/lib/bson/db_ref.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/db_ref.js rename to node_modules/mquery/node_modules/bson/lib/bson/db_ref.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/double.js b/node_modules/mquery/node_modules/bson/lib/bson/double.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/double.js rename to node_modules/mquery/node_modules/bson/lib/bson/double.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/float_parser.js b/node_modules/mquery/node_modules/bson/lib/bson/float_parser.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/float_parser.js rename to node_modules/mquery/node_modules/bson/lib/bson/float_parser.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/index.js b/node_modules/mquery/node_modules/bson/lib/bson/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/index.js rename to node_modules/mquery/node_modules/bson/lib/bson/index.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/long.js b/node_modules/mquery/node_modules/bson/lib/bson/long.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/long.js rename to node_modules/mquery/node_modules/bson/lib/bson/long.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/max_key.js b/node_modules/mquery/node_modules/bson/lib/bson/max_key.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/max_key.js rename to node_modules/mquery/node_modules/bson/lib/bson/max_key.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/min_key.js 
b/node_modules/mquery/node_modules/bson/lib/bson/min_key.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/min_key.js rename to node_modules/mquery/node_modules/bson/lib/bson/min_key.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/objectid.js b/node_modules/mquery/node_modules/bson/lib/bson/objectid.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/objectid.js rename to node_modules/mquery/node_modules/bson/lib/bson/objectid.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/symbol.js b/node_modules/mquery/node_modules/bson/lib/bson/symbol.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/symbol.js rename to node_modules/mquery/node_modules/bson/lib/bson/symbol.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/timestamp.js b/node_modules/mquery/node_modules/bson/lib/bson/timestamp.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/lib/bson/timestamp.js rename to node_modules/mquery/node_modules/bson/lib/bson/timestamp.js diff --git a/node_modules/mquery/node_modules/bson/package.json b/node_modules/mquery/node_modules/bson/package.json new file mode 100644 index 000000000..d5358f99b --- /dev/null +++ b/node_modules/mquery/node_modules/bson/package.json @@ -0,0 +1,73 @@ +{ + "_from": "bson@0.2.2", + "_id": "bson@0.2.2", + "_inBundle": false, + "_integrity": "sha1-Pb+YSsudM6aHi0bm+3r71hGFamA=", + "_location": "/mquery/bson", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "bson@0.2.2", + "name": "bson", + "escapedName": "bson", + "rawSpec": "0.2.2", + "saveSpec": null, + "fetchSpec": "0.2.2" + }, + "_requiredBy": [ + "/mquery/mongodb" + ], + "_resolved": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz", + "_shasum": "3dbf984acb9d33a6878b46e6fb7afbd611856a60", + "_spec": "bson@0.2.2", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mquery/node_modules/mongodb", + "author": { + "name": "Christian Amor Kvalheim", + "email": "christkv@gmail.com" + }, + "bugs": { + "url": "https://github.com/mongodb/js-bson/issues" + }, + "bundleDependencies": false, + "config": { + "native": false + }, + "contributors": [], + "deprecated": "Fixed a critical issue with BSON serialization documented in CVE-2019-2391, see https://bit.ly/2KcpXdo for more details", + "description": "A bson parser for node.js and the browser", + "devDependencies": { + "gleak": "0.2.3", + "nodeunit": "0.7.3", + "one": "2.X.X" + }, + "directories": { + "lib": "./lib/bson" + }, + "engines": { + "node": ">=0.6.19" + }, + "homepage": "https://github.com/mongodb/js-bson#readme", + "keywords": [ + "mongodb", + "bson", + "parser" + ], + "licenses": [ + { + "type": "Apache License, Version 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "./lib/bson/index", + "name": "bson", + "repository": { + "type": "git", + "url": "git://github.com/mongodb/js-bson.git" + }, + "scripts": { + "install": "(node-gyp rebuild 2> builderror.log) || (exit 0)", + "test": "nodeunit ./test/node && TEST_NATIVE=TRUE nodeunit ./test/node" + }, + "version": "0.2.2" +} diff --git 
a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/gleak.js b/node_modules/mquery/node_modules/bson/tools/gleak.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/gleak.js rename to node_modules/mquery/node_modules/bson/tools/gleak.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE b/node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE rename to node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/MIT.LICENSE diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js b/node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js rename to node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine-html.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.css b/node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine.css similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.css rename to node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine.css diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.js b/node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine.js rename to node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png b/node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png rename to node_modules/mquery/node_modules/bson/tools/jasmine-1.1.0/jasmine_favicon.png diff --git a/node_modules/express/node_modules/cookie-signature/.npmignore b/node_modules/mquery/node_modules/debug/.npmignore similarity index 100% rename from node_modules/express/node_modules/cookie-signature/.npmignore rename to node_modules/mquery/node_modules/debug/.npmignore diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/History.md b/node_modules/mquery/node_modules/debug/History.md similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/History.md rename to node_modules/mquery/node_modules/debug/History.md diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/Makefile b/node_modules/mquery/node_modules/debug/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/Makefile rename to node_modules/mquery/node_modules/debug/Makefile diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/Readme.md b/node_modules/mquery/node_modules/debug/Readme.md similarity index 100% 
rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/Readme.md rename to node_modules/mquery/node_modules/debug/Readme.md diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/debug.component.js b/node_modules/mquery/node_modules/debug/debug.component.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/debug.component.js rename to node_modules/mquery/node_modules/debug/debug.component.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/debug.js b/node_modules/mquery/node_modules/debug/debug.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/debug.js rename to node_modules/mquery/node_modules/debug/debug.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/example/app.js b/node_modules/mquery/node_modules/debug/example/app.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/example/app.js rename to node_modules/mquery/node_modules/debug/example/app.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/example/browser.html b/node_modules/mquery/node_modules/debug/example/browser.html similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/example/browser.html rename to node_modules/mquery/node_modules/debug/example/browser.html diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/example/wildcards.js b/node_modules/mquery/node_modules/debug/example/wildcards.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/example/wildcards.js rename to node_modules/mquery/node_modules/debug/example/wildcards.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/example/worker.js b/node_modules/mquery/node_modules/debug/example/worker.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/example/worker.js rename to node_modules/mquery/node_modules/debug/example/worker.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/head.js b/node_modules/mquery/node_modules/debug/head.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/head.js rename to node_modules/mquery/node_modules/debug/head.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/index.js b/node_modules/mquery/node_modules/debug/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/index.js rename to node_modules/mquery/node_modules/debug/index.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/lib/debug.js b/node_modules/mquery/node_modules/debug/lib/debug.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/lib/debug.js rename to node_modules/mquery/node_modules/debug/lib/debug.js diff --git a/node_modules/mquery/node_modules/debug/package.json b/node_modules/mquery/node_modules/debug/package.json new file mode 100644 index 000000000..fd8f99bf9 --- /dev/null +++ b/node_modules/mquery/node_modules/debug/package.json @@ -0,0 +1,53 @@ +{ + "_from": "debug@0.7.0", + "_id": "debug@0.7.0", + "_inBundle": false, + "_integrity": "sha1-9b4F7AQ0yZLXmUDlCyaVz7LgGwg=", + "_location": "/mquery/debug", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "debug@0.7.0", + "name": "debug", + 
"escapedName": "debug", + "rawSpec": "0.7.0", + "saveSpec": null, + "fetchSpec": "0.7.0" + }, + "_requiredBy": [ + "/mquery" + ], + "_resolved": "https://registry.npmjs.org/debug/-/debug-0.7.0.tgz", + "_shasum": "f5be05ec0434c992d79940e50b2695cfb2e01b08", + "_spec": "debug@0.7.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mquery", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "browserify": "debug.component.js", + "bundleDependencies": false, + "component": { + "scripts": { + "debug": "debug.component.js" + } + }, + "dependencies": {}, + "deprecated": false, + "description": "small debugging utility", + "devDependencies": { + "mocha": "*" + }, + "engines": { + "node": "*" + }, + "keywords": [ + "debug", + "log", + "debugger" + ], + "main": "index", + "name": "debug", + "version": "0.7.0" +} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/tail.js b/node_modules/mquery/node_modules/debug/tail.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/tail.js rename to node_modules/mquery/node_modules/debug/tail.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/.travis.yml b/node_modules/mquery/node_modules/mongodb/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/.travis.yml rename to node_modules/mquery/node_modules/mongodb/.travis.yml diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/CONTRIBUTING.md b/node_modules/mquery/node_modules/mongodb/CONTRIBUTING.md similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/CONTRIBUTING.md rename to node_modules/mquery/node_modules/mongodb/CONTRIBUTING.md diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/LICENSE b/node_modules/mquery/node_modules/mongodb/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/LICENSE rename to node_modules/mquery/node_modules/mongodb/LICENSE diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/Makefile b/node_modules/mquery/node_modules/mongodb/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/Makefile rename to node_modules/mquery/node_modules/mongodb/Makefile diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/Readme.md b/node_modules/mquery/node_modules/mongodb/Readme.md similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/Readme.md rename to node_modules/mquery/node_modules/mongodb/Readme.md diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/index.js b/node_modules/mquery/node_modules/mongodb/index.js old mode 100644 new mode 100755 similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/index.js rename to node_modules/mquery/node_modules/mongodb/index.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/admin.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/admin.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/admin.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/admin.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_cr.js 
b/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_cr.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_cr.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_cr.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_gssapi.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_gssapi.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_gssapi.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_gssapi.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_plain.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_plain.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_plain.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_plain.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_sspi.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_sspi.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_sspi.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/auth/mongodb_sspi.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/collection.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/collection.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/collection.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/collection.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/base_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/base_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/base_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/base_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/db_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/db_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/db_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/db_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/delete_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/delete_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/delete_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/delete_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/get_more_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/get_more_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/get_more_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/get_more_command.js diff --git 
a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/insert_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/insert_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/insert_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/insert_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/kill_cursor_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/kill_cursor_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/kill_cursor_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/kill_cursor_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/query_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/query_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/query_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/query_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/update_command.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/update_command.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/update_command.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/commands/update_command.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/base.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/base.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/base.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/base.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_pool.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_pool.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_pool.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_pool.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_utils.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_utils.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_utils.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/connection_utils.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/mongos.js 
b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/mongos.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/mongos.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/mongos.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/read_preference.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/read_preference.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/read_preference.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/read_preference.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/ha.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/ha.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/ha.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/ha.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/options.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/options.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/options.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/options.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set_state.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set_state.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set_state.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/repl_set_state.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/ping_strategy.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/ping_strategy.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/ping_strategy.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/ping_strategy.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/statistics_strategy.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/statistics_strategy.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/statistics_strategy.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/repl_set/strategies/statistics_strategy.js diff --git 
a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/server.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/server.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/server.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/server.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/url_parser.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/url_parser.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/url_parser.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/connection/url_parser.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/cursor.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/cursor.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/cursor.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/cursor.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/cursorstream.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/cursorstream.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/cursorstream.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/cursorstream.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/db.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/db.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/db.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/db.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/chunk.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/chunk.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/chunk.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/chunk.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/grid.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/grid.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/grid.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/grid.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/gridstore.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/gridstore.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/gridstore.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/gridstore.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/readstream.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/readstream.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/readstream.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/gridfs/readstream.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/index.js 
b/node_modules/mquery/node_modules/mongodb/lib/mongodb/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/index.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/index.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/mongo_client.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/mongo_client.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/mongo_client.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/mongo_client.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/responses/mongo_reply.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/responses/mongo_reply.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/responses/mongo_reply.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/responses/mongo_reply.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/utils.js b/node_modules/mquery/node_modules/mongodb/lib/mongodb/utils.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/lib/mongodb/utils.js rename to node_modules/mquery/node_modules/mongodb/lib/mongodb/utils.js diff --git a/node_modules/mquery/node_modules/mongodb/package.json b/node_modules/mquery/node_modules/mongodb/package.json new file mode 100755 index 000000000..467064c03 --- /dev/null +++ b/node_modules/mquery/node_modules/mongodb/package.json @@ -0,0 +1,245 @@ +{ + "_from": "mongodb@1.3.19", + "_id": "mongodb@1.3.19", + "_inBundle": false, + "_integrity": "sha1-8inbJAmPAZ2G0TWq+KGrXyZYsdQ=", + "_location": "/mquery/mongodb", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "mongodb@1.3.19", + "name": "mongodb", + "escapedName": "mongodb", + "rawSpec": "1.3.19", + "saveSpec": null, + "fetchSpec": "1.3.19" + }, + "_requiredBy": [ + "/mquery" + ], + "_resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz", + "_shasum": "f229db24098f019d86d135aaf8a1ab5f2658b1d4", + "_spec": "mongodb@1.3.19", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mquery", + "author": { + "name": "Christian Amor Kvalheim", + "email": "christkv@gmail.com" + }, + "bugs": { + "url": "http://github.com/mongodb/node-mongodb-native/issues" + }, + "bundleDependencies": false, + "config": { + "native": false + }, + "contributors": [ + { + "name": "Aaron Heckmann" + }, + { + "name": "Christoph Pojer" + }, + { + "name": "Pau Ramon Revilla" + }, + { + "name": "Nathan White" + }, + { + "name": "Emmerman" + }, + { + "name": "Seth LaForge" + }, + { + "name": "Boris Filipov" + }, + { + "name": "Stefan Schärmeli" + }, + { + "name": "Tedde Lundgren" + }, + { + "name": "renctan" + }, + { + "name": "Sergey Ukustov" + }, + { + "name": "Ciaran Jessup" + }, + { + "name": "kuno" + }, + { + "name": "srimonti" + }, + { + "name": "Erik Abele" + }, + { + "name": "Pratik Daga" + }, + { + "name": "Slobodan Utvic" + }, + { + "name": "Kristina Chodorow" + }, + { + "name": "Yonathan Randolph" + }, + { + "name": "Brian Noguchi" + }, + { + "name": "Sam Epstein" + }, + { + "name": "James Harrison Fisher" + }, + { + "name": "Vladimir Dronnikov" + }, + { + "name": "Ben Hockey" + }, + { + "name": "Henrik Johansson" + }, + { + "name": "Simon Weare" + }, + { + "name": "Alex Gorbatchev" + }, + { + 
"name": "Shimon Doodkin" + }, + { + "name": "Kyle Mueller" + }, + { + "name": "Eran Hammer-Lahav" + }, + { + "name": "Marcin Ciszak" + }, + { + "name": "François de Metz" + }, + { + "name": "Vinay Pulim" + }, + { + "name": "nstielau" + }, + { + "name": "Adam Wiggins" + }, + { + "name": "entrinzikyl" + }, + { + "name": "Jeremy Selier" + }, + { + "name": "Ian Millington" + }, + { + "name": "Public Keating" + }, + { + "name": "andrewjstone" + }, + { + "name": "Christopher Stott" + }, + { + "name": "Corey Jewett" + }, + { + "name": "brettkiefer" + }, + { + "name": "Rob Holland" + }, + { + "name": "Senmiao Liu" + }, + { + "name": "heroic" + }, + { + "name": "gitfy" + }, + { + "name": "Andrew Stone" + }, + { + "name": "John Le Drew" + }, + { + "name": "Lucasfilm Singapore" + }, + { + "name": "Roman Shtylman" + }, + { + "name": "Matt Self" + } + ], + "dependencies": { + "bson": "0.2.2", + "kerberos": "0.0.3" + }, + "deprecated": "Please upgrade to 2.2.19 or higher", + "description": "A node.js driver for MongoDB", + "devDependencies": { + "async": "0.1.22", + "dox": "0.2.0", + "ejs": "0.6.1", + "gleak": "0.2.3", + "integra": "latest", + "markdown": "0.3.1", + "nodeunit": "0.7.4", + "optimist": "latest", + "request": "2.12.0", + "step": "0.0.5", + "uglify-js": "1.2.5" + }, + "directories": { + "lib": "./lib/mongodb" + }, + "engines": { + "node": ">=0.6.19" + }, + "homepage": "http://mongodb.github.com/node-mongodb-native/", + "keywords": [ + "mongodb", + "mongo", + "driver", + "db" + ], + "licenses": [ + { + "type": "Apache License, Version 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "./lib/mongodb/index", + "name": "mongodb", + "optionalDependencies": { + "kerberos": "0.0.3" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/mongodb/node-mongodb-native.git" + }, + "scripts": { + "test": "make test_functional" + }, + "version": "1.3.19" +} diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/mongodb/t.js b/node_modules/mquery/node_modules/mongodb/t.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/mongodb/t.js rename to node_modules/mquery/node_modules/mongodb/t.js diff --git a/node_modules/mquery/package.json b/node_modules/mquery/package.json new file mode 100644 index 000000000..53785e228 --- /dev/null +++ b/node_modules/mquery/package.json @@ -0,0 +1,63 @@ +{ + "_from": "mquery@0.4.1", + "_id": "mquery@0.4.1", + "_inBundle": false, + "_integrity": "sha1-xGJt0pID85kavp3bqIeBJWrUL98=", + "_location": "/mquery", + "_phantomChildren": { + "kerberos": "0.0.3" + }, + "_requested": { + "type": "version", + "registry": true, + "raw": "mquery@0.4.1", + "name": "mquery", + "escapedName": "mquery", + "rawSpec": "0.4.1", + "saveSpec": null, + "fetchSpec": "0.4.1" + }, + "_requiredBy": [ + "/mongoose" + ], + "_resolved": "https://registry.npmjs.org/mquery/-/mquery-0.4.1.tgz", + "_shasum": "c4626dd29203f3991abe9ddba88781256ad42fdf", + "_spec": "mquery@0.4.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Aaron Heckmann", + "email": "aaron.heckmann+github@gmail.com" + }, + "bugs": { + "url": "https://github.com/aheckmann/mquery/issues/new" + }, + "bundleDependencies": false, + "dependencies": { + "debug": "0.7.0", + "mongodb": "1.3.19", + "regexp-clone": "0.0.1", + "sliced": "0.0.5" + }, + "deprecated": false, + "description": "Expressive query building for MongoDB", + "devDependencies": { + "mocha": "1.9.x" + }, + "homepage": 
"https://github.com/aheckmann/mquery/", + "keywords": [ + "mongodb", + "query", + "builder" + ], + "license": "MIT", + "main": "index.js", + "name": "mquery", + "repository": { + "type": "git", + "url": "git://github.com/aheckmann/mquery.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.4.1" +} diff --git a/node_modules/mongoose/node_modules/mquery/test/collection/browser.js b/node_modules/mquery/test/collection/browser.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/test/collection/browser.js rename to node_modules/mquery/test/collection/browser.js diff --git a/node_modules/mongoose/node_modules/mquery/test/collection/mongo.js b/node_modules/mquery/test/collection/mongo.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/test/collection/mongo.js rename to node_modules/mquery/test/collection/mongo.js diff --git a/node_modules/mongoose/node_modules/mquery/test/collection/node.js b/node_modules/mquery/test/collection/node.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/test/collection/node.js rename to node_modules/mquery/test/collection/node.js diff --git a/node_modules/mongoose/node_modules/mquery/test/env.js b/node_modules/mquery/test/env.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/test/env.js rename to node_modules/mquery/test/env.js diff --git a/node_modules/mongoose/node_modules/mquery/test/index.js b/node_modules/mquery/test/index.js similarity index 100% rename from node_modules/mongoose/node_modules/mquery/test/index.js rename to node_modules/mquery/test/index.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/.npmignore b/node_modules/ms/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/.npmignore rename to node_modules/ms/.npmignore diff --git a/node_modules/mongoose/node_modules/ms/Makefile b/node_modules/ms/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/ms/Makefile rename to node_modules/ms/Makefile diff --git a/node_modules/mongoose/node_modules/ms/README.md b/node_modules/ms/README.md similarity index 100% rename from node_modules/mongoose/node_modules/ms/README.md rename to node_modules/ms/README.md diff --git a/node_modules/mongoose/node_modules/ms/ms.js b/node_modules/ms/ms.js similarity index 100% rename from node_modules/mongoose/node_modules/ms/ms.js rename to node_modules/ms/ms.js diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json new file mode 100644 index 000000000..35c31322c --- /dev/null +++ b/node_modules/ms/package.json @@ -0,0 +1,39 @@ +{ + "_from": "ms@0.1.0", + "_id": "ms@0.1.0", + "_inBundle": false, + "_integrity": "sha1-8h+sSQ2vHXZn/RgP6QdzicyUQrI=", + "_location": "/ms", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "ms@0.1.0", + "name": "ms", + "escapedName": "ms", + "rawSpec": "0.1.0", + "saveSpec": null, + "fetchSpec": "0.1.0" + }, + "_requiredBy": [ + "/mongoose" + ], + "_resolved": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz", + "_shasum": "f21fac490daf1d7667fd180fe9077389cc9442b2", + "_spec": "ms@0.1.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "bundleDependencies": false, + "deprecated": false, + "description": "Tiny ms conversion utility", + "devDependencies": { + "expect.js": "*", + "mocha": "*", + 
"serve": "*" + }, + "engines": { + "node": "*" + }, + "main": "./ms", + "name": "ms", + "version": "0.1.0" +} diff --git a/node_modules/mongoose/node_modules/ms/test/index.html b/node_modules/ms/test/index.html similarity index 100% rename from node_modules/mongoose/node_modules/ms/test/index.html rename to node_modules/ms/test/index.html diff --git a/node_modules/mongoose/node_modules/ms/test/support/jquery.js b/node_modules/ms/test/support/jquery.js similarity index 100% rename from node_modules/mongoose/node_modules/ms/test/support/jquery.js rename to node_modules/ms/test/support/jquery.js diff --git a/node_modules/mongoose/node_modules/ms/test/test.js b/node_modules/ms/test/test.js similarity index 100% rename from node_modules/mongoose/node_modules/ms/test/test.js rename to node_modules/ms/test/test.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/.jshintrc b/node_modules/multiparty/.jshintrc similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/.jshintrc rename to node_modules/multiparty/.jshintrc diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/.npmignore b/node_modules/multiparty/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/.npmignore rename to node_modules/multiparty/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/.travis.yml b/node_modules/multiparty/.travis.yml similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/.travis.yml rename to node_modules/multiparty/.travis.yml diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/CHANGELOG.md b/node_modules/multiparty/CHANGELOG.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/CHANGELOG.md rename to node_modules/multiparty/CHANGELOG.md diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/LICENSE b/node_modules/multiparty/LICENSE similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/LICENSE rename to node_modules/multiparty/LICENSE diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/README.md b/node_modules/multiparty/README.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/README.md rename to node_modules/multiparty/README.md diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/examples/azureblobstorage.js b/node_modules/multiparty/examples/azureblobstorage.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/examples/azureblobstorage.js rename to node_modules/multiparty/examples/azureblobstorage.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/examples/s3.js b/node_modules/multiparty/examples/s3.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/examples/s3.js rename to node_modules/multiparty/examples/s3.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/examples/upload.js b/node_modules/multiparty/examples/upload.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/examples/upload.js rename to node_modules/multiparty/examples/upload.js diff --git 
a/node_modules/express/node_modules/connect/node_modules/multiparty/index.js b/node_modules/multiparty/index.js old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/index.js rename to node_modules/multiparty/index.js diff --git a/node_modules/multiparty/package.json b/node_modules/multiparty/package.json new file mode 100644 index 000000000..f0c83d3d9 --- /dev/null +++ b/node_modules/multiparty/package.json @@ -0,0 +1,64 @@ +{ + "_from": "multiparty@2.2.0", + "_id": "multiparty@2.2.0", + "_inBundle": false, + "_integrity": "sha1-pWfCrwAK0i3I8qZT2Rl4rh9TFvQ=", + "_location": "/multiparty", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "multiparty@2.2.0", + "name": "multiparty", + "escapedName": "multiparty", + "rawSpec": "2.2.0", + "saveSpec": null, + "fetchSpec": "2.2.0" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz", + "_shasum": "a567c2af000ad22dc8f2a653d91978ae1f5316f4", + "_spec": "multiparty@2.2.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "bugs": { + "url": "https://github.com/superjoe30/node-multiparty/issues" + }, + "bundleDependencies": false, + "dependencies": { + "readable-stream": "~1.1.9", + "stream-counter": "~0.2.0" + }, + "deprecated": false, + "description": "multipart/form-data parser which supports streaming", + "devDependencies": { + "findit": "0.1.1", + "hashish": "0.0.4", + "mkdirp": "~0.3.5", + "mocha": "~1.8.2", + "request": "~2.16.6", + "superagent": "~0.14.1" + }, + "engines": { + "node": ">=0.8.0" + }, + "homepage": "https://github.com/superjoe30/node-multiparty#readme", + "keywords": [ + "file", + "upload", + "formidable", + "stream", + "s3" + ], + "license": "MIT", + "name": "multiparty", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/superjoe30/node-multiparty.git" + }, + "scripts": { + "test": "ulimit -n 500 && mocha --timeout 4000 --reporter spec --recursive test/test.js" + }, + "version": "2.2.0" +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/bench-multipart-parser.js b/node_modules/multiparty/test/bench-multipart-parser.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/bench-multipart-parser.js rename to node_modules/multiparty/test/bench-multipart-parser.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/beta-sticker-1.png b/node_modules/multiparty/test/fixture/file/beta-sticker-1.png similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/beta-sticker-1.png rename to node_modules/multiparty/test/fixture/file/beta-sticker-1.png diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/blank.gif b/node_modules/multiparty/test/fixture/file/blank.gif old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/blank.gif rename to node_modules/multiparty/test/fixture/file/blank.gif diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/funkyfilename.txt b/node_modules/multiparty/test/fixture/file/funkyfilename.txt similarity index 100% rename from 
node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/funkyfilename.txt rename to node_modules/multiparty/test/fixture/file/funkyfilename.txt diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/menu_separator.png b/node_modules/multiparty/test/fixture/file/menu_separator.png similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/menu_separator.png rename to node_modules/multiparty/test/fixture/file/menu_separator.png diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/pf1y5.png b/node_modules/multiparty/test/fixture/file/pf1y5.png similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/pf1y5.png rename to node_modules/multiparty/test/fixture/file/pf1y5.png diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/plain.txt b/node_modules/multiparty/test/fixture/file/plain.txt similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/file/plain.txt rename to node_modules/multiparty/test/fixture/file/plain.txt diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http b/node_modules/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http similarity index 99% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http rename to node_modules/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http index 833b83ca9..7bfc6dcca 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http +++ b/node_modules/multiparty/test/fixture/http/encoding/beta-sticker-1.png.http @@ -1,12 +1,12 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Length: 2483 - ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Disposition: form-data; name="sticker"; filename="beta-sticker-1.png" -Content-Type: image/png -Content-Transfer-Encoding: base64 - 
-iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAABh5JREFUeNrMmHtIHEcYwGfv5SNwaovxEanEiJKqlYCCTRo1f0SvDeof1legEcE/YttQaNOiaQjYFFtpKaJILZU8SCRUWqlJGpoWepGLTXqUEnzFxCrnK9DEelbvvPOe/WacuY7r7HmGFjrwsbNzt7u//V7zfYvQ/2xI/9K1/NyvMP9PgCTuGmmL6/0ckD9UOGmbIExUsqMkAPHJjv5QwKRtgKioqDlh5+w/7IFeCuLlxCeA2zQ0IcCwh2qoaLH09fUdTElJ2e/1elU+n0/y+9fvPz4+fvfYsWN3YOoBcXPiocLghD4mBYHhQTCErqWlZU9FRcXJqKiowyqVSk/uSEH4o8fjWVlYWDB2d3e3d3R0WGB5jYqLg/NyGgsKxMNgkDB4451NTU3vxcXF1SlBKB0tFsuVxsbGjlu3bj2GJQeIk8K5RVBqBTMxrYRfuHAh9/jx4+ejo6MPS9I6f6hHPOC6rOLi4vyVlZXf7t27Z5c5/iZfkgMxxyUwFy9ezC0tLe3V6XRJ/MOCAYjWwsLCni0oKCh98uSJaWhoyMZFn0/uT2qBqYi/1NbWxjc0NJwPFUYExc/B53R5eXk5ZrN5YH5+3slFn5+D2uBDzG90IJETExOtzGdC9RelNf78wYMH3xQWFn4Ep0sgyyCr1NmJP6kEIa5tbW3dEx8fXxeKRoJpT76OR3p6enllZWUKTCOwNalFAglWDkTCvLq6+uR2YYKZSw4GQVKNfZQCafjkqhKYTBsTE3NY/uYi2Q4MP5KTkw9QGB3VEMv6G/YioqFLly5lazQavfytxobnUW+PWTGisIyNPEL3QYLB4PPIyMi4EydO7JUBbTIZ0RDYOFPkE8t/OdHczCK6Y/qdzP8BfUTW8Tj/uQndvT1F5vOzVvTLz1PwX4cQbt++fekURsNpSNLIw16v1z/HLsRRgecsSnovm8nxs5bvUe+NN1Bz47fkfBaAXj2aA2BWEsM/3hhFX1/5Fe3NTEAfvn8NXTO+tSH68IiNjU2Qw/AmCzg2XCQp+YyhJAu9c+pl9GJ+KmhiEt38bhjpoyJQRtYudA60k3dwD6o4mouKjmSiolcy0ArRqnXz3rT+knwFEShhNKLNlmmFP7Kf8XxuehHpj0QQmLdPGch/ioYyCSAe57pMaHnJgcprctDdwUkRjKi8CUTWhipvbm7uvlJo3zFNoHJDOznPeGEXqn+9EBUf+AQZXvqU+BEG/KCpHz2flYh+ALO9++ZX5L/Mj3gfevjw4ZRoP+PzD/b4HadPn844c+aMkb0F1DqIz9byzBvquXytvr6+7vr16+Ow9CfN2njjdfFAWpo9o2FnNmm12kQMw24gcvSnhbHb7Y+huHsNlhapLNHSxK3idlq287qhhrkKlSByOBzIZrPhGyCn04ncbjfRGAMV5ZlQxvDw8E+yYi1Q3qpleYjUQlNTU5aysrJqgNBhIAwGVSDCkFj48BVFULA1eCl7XV3dx1CKYK3YqKnY7u9Ti2royclJ76FDh1YhxefgsoFpCIOtra0RuGBQwYbRaLzc1dVlpjA2ZiqmKbWsDAmEYU9Pz8Tg4OCNoqKixNTU1BQostDq6iqBcrlcRBiYfEff1KBR+OnpabPBYOikWlnhtOOWm0zUffpnZ2ednZ2dJtCYMTs7+xkA2x0eHk6gsMYwFPYr/EC1Wo2LMEWzWa1WC1QRZ8FUVgpj42ohD3umWqHjRFxf5RkZGVkCNQ9CcTWQn5+flpSUtBOiMKAt7Fek/FSAmpmZMVdVVZ0dGxv7g4PhteMVlbBIofv0sh4Lbmhtb2+/Cbv1eFpaWmJCQsJODMO0hGGgUghAAay9v7//i5KSki9lmmG+4+Jg/MHaIH6f0dCkqaNFFc5VkViam5v319TUNEDdvRubEGsNYHGqsAwMDFxta2u7DdpdpA+3c+LgWiHfVkCiFnpDw0iLqwgqO6BVKoPo00K6WIDsOzE6OrpE395FzeLgxMn5jVe0dYTa26s5jfFg4VR0nAuwNtrFda1rgmToD6VzVWq3eTPyYAxOwwH5gvT2PiWY7X4fUgJTywp1fivyyL6E+Lb6XvQ0X9AkBeeXZED+p/k+9LcAAwAXm3hBLzoZPAAAAABJRU5ErkJggg== ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Length: 2483 + +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Disposition: form-data; name="sticker"; filename="beta-sticker-1.png" +Content-Type: image/png +Content-Transfer-Encoding: base64 + 
+iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAABh5JREFUeNrMmHtIHEcYwGfv5SNwaovxEanEiJKqlYCCTRo1f0SvDeof1legEcE/YttQaNOiaQjYFFtpKaJILZU8SCRUWqlJGpoWepGLTXqUEnzFxCrnK9DEelbvvPOe/WacuY7r7HmGFjrwsbNzt7u//V7zfYvQ/2xI/9K1/NyvMP9PgCTuGmmL6/0ckD9UOGmbIExUsqMkAPHJjv5QwKRtgKioqDlh5+w/7IFeCuLlxCeA2zQ0IcCwh2qoaLH09fUdTElJ2e/1elU+n0/y+9fvPz4+fvfYsWN3YOoBcXPiocLghD4mBYHhQTCErqWlZU9FRcXJqKiowyqVSk/uSEH4o8fjWVlYWDB2d3e3d3R0WGB5jYqLg/NyGgsKxMNgkDB4451NTU3vxcXF1SlBKB0tFsuVxsbGjlu3bj2GJQeIk8K5RVBqBTMxrYRfuHAh9/jx4+ejo6MPS9I6f6hHPOC6rOLi4vyVlZXf7t27Z5c5/iZfkgMxxyUwFy9ezC0tLe3V6XRJ/MOCAYjWwsLCni0oKCh98uSJaWhoyMZFn0/uT2qBqYi/1NbWxjc0NJwPFUYExc/B53R5eXk5ZrN5YH5+3slFn5+D2uBDzG90IJETExOtzGdC9RelNf78wYMH3xQWFn4Ep0sgyyCr1NmJP6kEIa5tbW3dEx8fXxeKRoJpT76OR3p6enllZWUKTCOwNalFAglWDkTCvLq6+uR2YYKZSw4GQVKNfZQCafjkqhKYTBsTE3NY/uYi2Q4MP5KTkw9QGB3VEMv6G/YioqFLly5lazQavfytxobnUW+PWTGisIyNPEL3QYLB4PPIyMi4EydO7JUBbTIZ0RDYOFPkE8t/OdHczCK6Y/qdzP8BfUTW8Tj/uQndvT1F5vOzVvTLz1PwX4cQbt++fekURsNpSNLIw16v1z/HLsRRgecsSnovm8nxs5bvUe+NN1Bz47fkfBaAXj2aA2BWEsM/3hhFX1/5Fe3NTEAfvn8NXTO+tSH68IiNjU2Qw/AmCzg2XCQp+YyhJAu9c+pl9GJ+KmhiEt38bhjpoyJQRtYudA60k3dwD6o4mouKjmSiolcy0ArRqnXz3rT+knwFEShhNKLNlmmFP7Kf8XxuehHpj0QQmLdPGch/ioYyCSAe57pMaHnJgcprctDdwUkRjKi8CUTWhipvbm7uvlJo3zFNoHJDOznPeGEXqn+9EBUf+AQZXvqU+BEG/KCpHz2flYh+ALO9++ZX5L/Mj3gfevjw4ZRoP+PzD/b4HadPn844c+aMkb0F1DqIz9byzBvquXytvr6+7vr16+Ow9CfN2njjdfFAWpo9o2FnNmm12kQMw24gcvSnhbHb7Y+huHsNlhapLNHSxK3idlq287qhhrkKlSByOBzIZrPhGyCn04ncbjfRGAMV5ZlQxvDw8E+yYi1Q3qpleYjUQlNTU5aysrJqgNBhIAwGVSDCkFj48BVFULA1eCl7XV3dx1CKYK3YqKnY7u9Ti2royclJ76FDh1YhxefgsoFpCIOtra0RuGBQwYbRaLzc1dVlpjA2ZiqmKbWsDAmEYU9Pz8Tg4OCNoqKixNTU1BQostDq6iqBcrlcRBiYfEff1KBR+OnpabPBYOikWlnhtOOWm0zUffpnZ2ednZ2dJtCYMTs7+xkA2x0eHk6gsMYwFPYr/EC1Wo2LMEWzWa1WC1QRZ8FUVgpj42ohD3umWqHjRFxf5RkZGVkCNQ9CcTWQn5+flpSUtBOiMKAt7Fek/FSAmpmZMVdVVZ0dGxv7g4PhteMVlbBIofv0sh4Lbmhtb2+/Cbv1eFpaWmJCQsJODMO0hGGgUghAAay9v7//i5KSki9lmmG+4+Jg/MHaIH6f0dCkqaNFFc5VkViam5v319TUNEDdvRubEGsNYHGqsAwMDFxta2u7DdpdpA+3c+LgWiHfVkCiFnpDw0iLqwgqO6BVKoPo00K6WIDsOzE6OrpE395FzeLgxMn5jVe0dYTa26s5jfFg4VR0nAuwNtrFda1rgmToD6VzVWq3eTPyYAxOwwH5gvT2PiWY7X4fUgJTywp1fivyyL6E+Lb6XvQ0X9AkBeeXZED+p/k+9LcAAwAXm3hBLzoZPAAAAABJRU5ErkJggg== +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http b/node_modules/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http similarity index 98% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http rename to node_modules/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http index 4f4fadb04..28b1d0e69 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http +++ b/node_modules/multiparty/test/fixture/http/encoding/binaryfile.tar.gz.http @@ -1,12 +1,12 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Length: 676 - ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Disposition: form-data; name="file"; filename="binaryfile.tar.gz" -Content-Type: application/x-gzip -Content-Transfer-Encoding: base64 - 
-H4sIAGiNIU8AA+3R0W6CMBQGYK59iobLZantRDG73osUOGqnFNJWM2N897UghG1ZdmWWLf93U/jP4bRAq8q92hJ/dY1J7kQEqyyLq8yXYrp2ltkqkTKXYiEykYc++ZTLVcLEvQ40dXReWcYSV1pdnL/v+6n+R11mjKVG1ZQ+s3TT2FpXqjhQ+hjzE1mnGxNLkgu+7tOKWjIVmVKTC6XL9ZaeXj4VQhwKWzL+cI4zwgQuuhkh3mhTad/Hkssh3im3027X54JnQ360R/M19OT8kC7SEN7Ooi2VvrEfznHQRWzl83gxttZKmzGehzPRW/+W8X+3fvL8sFet9sS6m3EIma02071MU3Uf9KHrmV1/+y8DAAAAAAAAAAAAAAAAAAAAAMB/9A6txIuJACgAAA== ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Length: 676 + +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Disposition: form-data; name="file"; filename="binaryfile.tar.gz" +Content-Type: application/x-gzip +Content-Transfer-Encoding: base64 + +H4sIAGiNIU8AA+3R0W6CMBQGYK59iobLZantRDG73osUOGqnFNJWM2N897UghG1ZdmWWLf93U/jP4bRAq8q92hJ/dY1J7kQEqyyLq8yXYrp2ltkqkTKXYiEykYc++ZTLVcLEvQ40dXReWcYSV1pdnL/v+6n+R11mjKVG1ZQ+s3TT2FpXqjhQ+hjzE1mnGxNLkgu+7tOKWjIVmVKTC6XL9ZaeXj4VQhwKWzL+cI4zwgQuuhkh3mhTad/Hkssh3im3027X54JnQ360R/M19OT8kC7SEN7Ooi2VvrEfznHQRWzl83gxttZKmzGehzPRW/+W8X+3fvL8sFet9sS6m3EIma02071MU3Uf9KHrmV1/+y8DAAAAAAAAAAAAAAAAAAAAAMB/9A6txIuJACgAAA== +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/blank.gif.http b/node_modules/multiparty/test/fixture/http/encoding/blank.gif.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/blank.gif.http rename to node_modules/multiparty/test/fixture/http/encoding/blank.gif.http index 7426f5b65..cf54956dc 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/blank.gif.http +++ b/node_modules/multiparty/test/fixture/http/encoding/blank.gif.http @@ -1,12 +1,12 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Length: 323 - ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Disposition: form-data; name="file"; filename="blank.gif" -Content-Type: image/gif -Content-Transfer-Encoding: base64 - -R0lGODlhAQABAJH/AP///wAAAMDAwAAAACH5BAEAAAIALAAAAAABAAEAAAICVAEAOw== ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Length: 323 + +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Disposition: form-data; name="file"; filename="blank.gif" +Content-Type: image/gif +Content-Transfer-Encoding: base64 + +R0lGODlhAQABAJH/AP///wAAAMDAwAAAACH5BAEAAAIALAAAAAABAAEAAAICVAEAOw== +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/menu_seperator.png.http b/node_modules/multiparty/test/fixture/http/encoding/menu_seperator.png.http similarity index 99% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/menu_seperator.png.http rename to node_modules/multiparty/test/fixture/http/encoding/menu_seperator.png.http index d08fd37e2..3fd5085e7 100644 --- 
a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/menu_seperator.png.http +++ b/node_modules/multiparty/test/fixture/http/encoding/menu_seperator.png.http @@ -1,12 +1,12 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Length: 1509 - ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ -Content-Disposition: form-data; name="image"; filename="menu_separator.png" -Content-Type: image/png -Content-Transfer-Encoding: base64 - -iVBORw0KGgoAAAANSUhEUgAAAAIAAAAYCAIAAABfmbuOAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNSBNYWNpbnRvc2giIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6MDcxODNBNzJERDcyMTFFMUFBOEVFNDQzOTA0MDJDMjQiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6MDcxODNBNzNERDcyMTFFMUFBOEVFNDQzOTA0MDJDMjQiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDowNzE4M0E3MERENzIxMUUxQUE4RUU0NDM5MDQwMkMyNCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDowNzE4M0E3MURENzIxMUUxQUE4RUU0NDM5MDQwMkMyNCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pmvhbb8AAAAXSURBVHjaYnHk9PON8WJiAIPBSwEEGAAPrgG+VozFWgAAAABJRU5ErkJggg== ---\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Length: 1509 + +--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/ +Content-Disposition: form-data; name="image"; filename="menu_separator.png" +Content-Type: image/png +Content-Transfer-Encoding: base64 + +iVBORw0KGgoAAAANSUhEUgAAAAIAAAAYCAIAAABfmbuOAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNSBNYWNpbnRvc2giIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6MDcxODNBNzJERDcyMTFFMUFBOEVFNDQzOTA0MDJDMjQiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6MDcxODNBNzNERDcyMTFFMUFBOEVFNDQzOTA0MDJDMjQiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDowNzE4M0E3MERENzIxMUUxQUE4RUU0NDM5MDQwMkMyNCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDowNzE4M0E3MURENzIxMUUxQUE4RUU0NDM5MDQwMkMyNCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pmvhbb8AAAAXSURBVHjaYnHk9PON8WJiAIPBSwEEGAAPrgG+VozFWgAAAABJRU5ErkJggg== 
+--\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/pf1y5.png.http b/node_modules/multiparty/test/fixture/http/encoding/pf1y5.png.http similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/pf1y5.png.http rename to node_modules/multiparty/test/fixture/http/encoding/pf1y5.png.http diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/plain.txt.http b/node_modules/multiparty/test/fixture/http/encoding/plain.txt.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/plain.txt.http rename to node_modules/multiparty/test/fixture/http/encoding/plain.txt.http index 5e85ad6bb..230b2054e 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/encoding/plain.txt.http +++ b/node_modules/multiparty/test/fixture/http/encoding/plain.txt.http @@ -1,13 +1,13 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ -Content-Length: 221 - -------TLV0SrKD4z1TRxRhAPUvZ -Content-Disposition: form-data; name="file"; filename="plain.txt" -Content-Type: text/plain -Content-Transfer-Encoding: 7bit - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ +Content-Length: 221 + +------TLV0SrKD4z1TRxRhAPUvZ +Content-Disposition: form-data; name="file"; filename="plain.txt" +Content-Type: text/plain +Content-Transfer-Encoding: 7bit + I am a plain text file - -------TLV0SrKD4z1TRxRhAPUvZ-- + +------TLV0SrKD4z1TRxRhAPUvZ-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/no-filename/filename-name.http b/node_modules/multiparty/test/fixture/http/no-filename/filename-name.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/no-filename/filename-name.http rename to node_modules/multiparty/test/fixture/http/no-filename/filename-name.http index 43672a329..e449156bd 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/no-filename/filename-name.http +++ b/node_modules/multiparty/test/fixture/http/no-filename/filename-name.http @@ -1,13 +1,13 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----WebKitFormBoundarytyE4wkKlZ5CQJVTG -Content-Length: 1000 - -------WebKitFormBoundarytyE4wkKlZ5CQJVTG -Content-Disposition: form-data; filename="plain.txt"; name="upload" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----WebKitFormBoundarytyE4wkKlZ5CQJVTG +Content-Length: 1000 + +------WebKitFormBoundarytyE4wkKlZ5CQJVTG +Content-Disposition: form-data; filename="plain.txt"; name="upload" +Content-Type: text/plain + I am a plain text file - -------WebKitFormBoundarytyE4wkKlZ5CQJVTG-- + +------WebKitFormBoundarytyE4wkKlZ5CQJVTG-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/no-filename/generic.http b/node_modules/multiparty/test/fixture/http/no-filename/generic.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/no-filename/generic.http rename to 
node_modules/multiparty/test/fixture/http/no-filename/generic.http index e0dee27c0..c051d852c 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/no-filename/generic.http +++ b/node_modules/multiparty/test/fixture/http/no-filename/generic.http @@ -1,13 +1,13 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----WebKitFormBoundarytyE4wkKlZ5CQJVTG -Content-Length: 1000 - -------WebKitFormBoundarytyE4wkKlZ5CQJVTG -Content-Disposition: form-data; name="upload"; filename="" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----WebKitFormBoundarytyE4wkKlZ5CQJVTG +Content-Length: 1000 + +------WebKitFormBoundarytyE4wkKlZ5CQJVTG +Content-Disposition: form-data; name="upload"; filename="" +Content-Type: text/plain + I am a plain text file - -------WebKitFormBoundarytyE4wkKlZ5CQJVTG-- + +------WebKitFormBoundarytyE4wkKlZ5CQJVTG-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/preamble/crlf.http b/node_modules/multiparty/test/fixture/http/preamble/crlf.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/preamble/crlf.http rename to node_modules/multiparty/test/fixture/http/preamble/crlf.http index 1d5f7095b..1357950a8 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/preamble/crlf.http +++ b/node_modules/multiparty/test/fixture/http/preamble/crlf.http @@ -1,13 +1,13 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ -Content-Length: 184 - - -------TLV0SrKD4z1TRxRhAPUvZ -Content-Disposition: form-data; name="upload"; filename="plain.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ +Content-Length: 184 + + +------TLV0SrKD4z1TRxRhAPUvZ +Content-Disposition: form-data; name="upload"; filename="plain.txt" +Content-Type: text/plain + I am a plain text file - -------TLV0SrKD4z1TRxRhAPUvZ-- + +------TLV0SrKD4z1TRxRhAPUvZ-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/preamble/preamble.http b/node_modules/multiparty/test/fixture/http/preamble/preamble.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/preamble/preamble.http rename to node_modules/multiparty/test/fixture/http/preamble/preamble.http index d14d4330f..ab490a360 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/preamble/preamble.http +++ b/node_modules/multiparty/test/fixture/http/preamble/preamble.http @@ -1,13 +1,13 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ -Content-Length: 226 - -This is a preamble which should be ignored -------TLV0SrKD4z1TRxRhAPUvZ -Content-Disposition: form-data; name="upload"; filename="plain.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ +Content-Length: 226 + +This is a preamble which should be ignored +------TLV0SrKD4z1TRxRhAPUvZ +Content-Disposition: form-data; name="upload"; filename="plain.txt" +Content-Type: text/plain + I am a plain text file - -------TLV0SrKD4z1TRxRhAPUvZ-- + 
+------TLV0SrKD4z1TRxRhAPUvZ-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/info.md b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/info.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/info.md rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/info.md diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http index 4ef391729..6dec0d38c 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-chrome-13.http @@ -1,26 +1,26 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Connection: keep-alive -Referer: http://localhost:8080/ -Content-Length: 383 -Cache-Control: max-age=0 -Origin: http://localhost:8080 -User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.220 Safari/535.1 -Content-Type: multipart/form-data; boundary=----WebKitFormBoundarytyE4wkKlZ5CQJVTG -Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 -Accept-Encoding: gzip,deflate,sdch -Accept-Language: en-US,en;q=0.8 -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.3 -Cookie: jqCookieJar_tablesorter=%7B%22showListTable%22%3A%5B%5B5%2C1%5D%2C%5B1%2C0%5D%5D%7D - -------WebKitFormBoundarytyE4wkKlZ5CQJVTG -Content-Disposition: form-data; name="title" - -Weird filename -------WebKitFormBoundarytyE4wkKlZ5CQJVTG -Content-Disposition: form-data; name="upload"; filename=": \ ? % * | %22 < > . ? ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Connection: keep-alive +Referer: http://localhost:8080/ +Content-Length: 383 +Cache-Control: max-age=0 +Origin: http://localhost:8080 +User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.220 Safari/535.1 +Content-Type: multipart/form-data; boundary=----WebKitFormBoundarytyE4wkKlZ5CQJVTG +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 +Accept-Encoding: gzip,deflate,sdch +Accept-Language: en-US,en;q=0.8 +Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.3 +Cookie: jqCookieJar_tablesorter=%7B%22showListTable%22%3A%5B%5B5%2C1%5D%2C%5B1%2C0%5D%5D%7D + +------WebKitFormBoundarytyE4wkKlZ5CQJVTG +Content-Disposition: form-data; name="title" + +Weird filename +------WebKitFormBoundarytyE4wkKlZ5CQJVTG +Content-Disposition: form-data; name="upload"; filename=": \ ? % * | %22 < > . ? ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: text/plain + I am a text file with a funky name! 
- -------WebKitFormBoundarytyE4wkKlZ5CQJVTG-- + +------WebKitFormBoundarytyE4wkKlZ5CQJVTG-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http index bf49f85f9..76ff2b34a 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-firefox-3.6.http @@ -1,24 +1,24 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.22) Gecko/20110902 Firefox/3.6.22 -Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 -Accept-Language: en-us,en;q=0.5 -Accept-Encoding: gzip,deflate -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 -Keep-Alive: 115 -Connection: keep-alive -Referer: http://localhost:8080/ -Content-Type: multipart/form-data; boundary=---------------------------9849436581144108930470211272 -Content-Length: 438 - ------------------------------9849436581144108930470211272 -Content-Disposition: form-data; name="title" - -Weird filename ------------------------------9849436581144108930470211272 -Content-Disposition: form-data; name="upload"; filename=": \ ? % * | " < > . ☃ ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.22) Gecko/20110902 Firefox/3.6.22 +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 +Accept-Language: en-us,en;q=0.5 +Accept-Encoding: gzip,deflate +Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 +Keep-Alive: 115 +Connection: keep-alive +Referer: http://localhost:8080/ +Content-Type: multipart/form-data; boundary=---------------------------9849436581144108930470211272 +Content-Length: 438 + +-----------------------------9849436581144108930470211272 +Content-Disposition: form-data; name="title" + +Weird filename +-----------------------------9849436581144108930470211272 +Content-Disposition: form-data; name="upload"; filename=": \ ? % * | " < > . ☃ ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: text/plain + I am a text file with a funky name! 
- ------------------------------9849436581144108930470211272-- + +-----------------------------9849436581144108930470211272-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http index ff158a43c..b3652d90e 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/osx-safari-5.http @@ -1,23 +1,23 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Origin: http://localhost:8080 -Content-Length: 383 -User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1 -Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryQJZ1gvhvdgfisJPJ -Accept: application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 -Referer: http://localhost:8080/ -Accept-Language: en-us -Accept-Encoding: gzip, deflate -Connection: keep-alive - -------WebKitFormBoundaryQJZ1gvhvdgfisJPJ -Content-Disposition: form-data; name="title" - -Weird filename -------WebKitFormBoundaryQJZ1gvhvdgfisJPJ -Content-Disposition: form-data; name="upload"; filename=": \ ? % * | %22 < > . ? ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Origin: http://localhost:8080 +Content-Length: 383 +User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1 +Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryQJZ1gvhvdgfisJPJ +Accept: application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 +Referer: http://localhost:8080/ +Accept-Language: en-us +Accept-Encoding: gzip, deflate +Connection: keep-alive + +------WebKitFormBoundaryQJZ1gvhvdgfisJPJ +Content-Disposition: form-data; name="title" + +Weird filename +------WebKitFormBoundaryQJZ1gvhvdgfisJPJ +Content-Disposition: form-data; name="upload"; filename=": \ ? % * | %22 < > . ? ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: text/plain + I am a text file with a funky name! 
- -------WebKitFormBoundaryQJZ1gvhvdgfisJPJ-- + +------WebKitFormBoundaryQJZ1gvhvdgfisJPJ-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http index f0fc533a4..ef8d1d602 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-chrome-12.http @@ -1,24 +1,24 @@ -POST /upload HTTP/1.1 -Host: 192.168.56.1:8080 -Connection: keep-alive -Referer: http://192.168.56.1:8080/ -Content-Length: 344 -Cache-Control: max-age=0 -Origin: http://192.168.56.1:8080 -User-Agent: Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.742.122 Safari/534.30 -Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryEvqBNplR3ByrwQPa -Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 -Accept-Encoding: gzip,deflate,sdch -Accept-Language: de-DE,de;q=0.8,en-US;q=0.6,en;q=0.4 -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.3 - -------WebKitFormBoundaryEvqBNplR3ByrwQPa -Content-Disposition: form-data; name="title" - -Weird filename -------WebKitFormBoundaryEvqBNplR3ByrwQPa -Content-Disposition: form-data; name="upload"; filename=" ? % * | %22 < > . ? ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: text/plain - - -------WebKitFormBoundaryEvqBNplR3ByrwQPa-- +POST /upload HTTP/1.1 +Host: 192.168.56.1:8080 +Connection: keep-alive +Referer: http://192.168.56.1:8080/ +Content-Length: 344 +Cache-Control: max-age=0 +Origin: http://192.168.56.1:8080 +User-Agent: Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.742.122 Safari/534.30 +Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryEvqBNplR3ByrwQPa +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 +Accept-Encoding: gzip,deflate,sdch +Accept-Language: de-DE,de;q=0.8,en-US;q=0.6,en;q=0.4 +Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.3 + +------WebKitFormBoundaryEvqBNplR3ByrwQPa +Content-Disposition: form-data; name="title" + +Weird filename +------WebKitFormBoundaryEvqBNplR3ByrwQPa +Content-Disposition: form-data; name="upload"; filename=" ? % * | %22 < > . ? 
; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: text/plain + + +------WebKitFormBoundaryEvqBNplR3ByrwQPa-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http index 2e2c61c7f..4befdc711 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-7.http @@ -1,22 +1,22 @@ -POST /upload HTTP/1.1 -Accept: image/gif, image/jpeg, image/pjpeg, image/pjpeg, application/x-shockwave-flash, */* -Referer: http://192.168.56.1:8080/ -Accept-Language: de -User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1) -Content-Type: multipart/form-data; boundary=---------------------------7db1fe232017c -Accept-Encoding: gzip, deflate -Host: 192.168.56.1:8080 -Content-Length: 368 -Connection: Keep-Alive -Cache-Control: no-cache - ------------------------------7db1fe232017c -Content-Disposition: form-data; name="title" - -Weird filename ------------------------------7db1fe232017c -Content-Disposition: form-data; name="upload"; filename=" ? % * | " < > . ☃ ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: application/octet-stream - - ------------------------------7db1fe232017c-- +POST /upload HTTP/1.1 +Accept: image/gif, image/jpeg, image/pjpeg, image/pjpeg, application/x-shockwave-flash, */* +Referer: http://192.168.56.1:8080/ +Accept-Language: de +User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1) +Content-Type: multipart/form-data; boundary=---------------------------7db1fe232017c +Accept-Encoding: gzip, deflate +Host: 192.168.56.1:8080 +Content-Length: 368 +Connection: Keep-Alive +Cache-Control: no-cache + +-----------------------------7db1fe232017c +Content-Disposition: form-data; name="title" + +Weird filename +-----------------------------7db1fe232017c +Content-Disposition: form-data; name="upload"; filename=" ? % * | " < > . 
☃ ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: application/octet-stream + + +-----------------------------7db1fe232017c-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http index e2b94fae2..9c1c53305 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-ie-8.http @@ -1,22 +1,22 @@ -POST /upload HTTP/1.1 -Accept: image/gif, image/jpeg, image/pjpeg, image/pjpeg, application/x-shockwave-flash, */* -Referer: http://192.168.56.1:8080/ -Accept-Language: de -User-Agent: Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0) -Content-Type: multipart/form-data; boundary=---------------------------7db3a8372017c -Accept-Encoding: gzip, deflate -Host: 192.168.56.1:8080 -Content-Length: 368 -Connection: Keep-Alive -Cache-Control: no-cache - ------------------------------7db3a8372017c -Content-Disposition: form-data; name="title" - -Weird filename ------------------------------7db3a8372017c -Content-Disposition: form-data; name="upload"; filename=" ? % * | " < > . ☃ ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: application/octet-stream - - ------------------------------7db3a8372017c-- +POST /upload HTTP/1.1 +Accept: image/gif, image/jpeg, image/pjpeg, image/pjpeg, application/x-shockwave-flash, */* +Referer: http://192.168.56.1:8080/ +Accept-Language: de +User-Agent: Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0) +Content-Type: multipart/form-data; boundary=---------------------------7db3a8372017c +Accept-Encoding: gzip, deflate +Host: 192.168.56.1:8080 +Content-Length: 368 +Connection: Keep-Alive +Cache-Control: no-cache + +-----------------------------7db3a8372017c +Content-Disposition: form-data; name="title" + +Weird filename +-----------------------------7db3a8372017c +Content-Disposition: form-data; name="upload"; filename=" ? % * | " < > . 
☃ ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: application/octet-stream + + +-----------------------------7db3a8372017c-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http rename to node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http index 6379ac017..2b7bacb52 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http +++ b/node_modules/multiparty/test/fixture/http/special-chars-in-filename/xp-safari-5.http @@ -1,22 +1,22 @@ -POST /upload HTTP/1.1 -Host: 192.168.56.1:8080 -Referer: http://192.168.56.1:8080/ -Accept: application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 -Accept-Language: en-US -Origin: http://192.168.56.1:8080 -User-Agent: Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4 -Accept-Encoding: gzip, deflate -Content-Type: multipart/form-data; boundary=----WebKitFormBoundarykmaWSUbu697WN9TM -Content-Length: 344 -Connection: keep-alive - -------WebKitFormBoundarykmaWSUbu697WN9TM -Content-Disposition: form-data; name="title" - -Weird filename -------WebKitFormBoundarykmaWSUbu697WN9TM -Content-Disposition: form-data; name="upload"; filename=" ? % * | %22 < > . ? ; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" -Content-Type: text/plain - - -------WebKitFormBoundarykmaWSUbu697WN9TM-- +POST /upload HTTP/1.1 +Host: 192.168.56.1:8080 +Referer: http://192.168.56.1:8080/ +Accept: application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 +Accept-Language: en-US +Origin: http://192.168.56.1:8080 +User-Agent: Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/533.19.4 (KHTML, like Gecko) Version/5.0.3 Safari/533.19.4 +Accept-Encoding: gzip, deflate +Content-Type: multipart/form-data; boundary=----WebKitFormBoundarykmaWSUbu697WN9TM +Content-Length: 344 +Connection: keep-alive + +------WebKitFormBoundarykmaWSUbu697WN9TM +Content-Disposition: form-data; name="title" + +Weird filename +------WebKitFormBoundarykmaWSUbu697WN9TM +Content-Disposition: form-data; name="upload"; filename=" ? % * | %22 < > . ? 
; ' @ # $ ^ & ( ) - _ = + { } [ ] ` ~.txt" +Content-Type: text/plain + + +------WebKitFormBoundarykmaWSUbu697WN9TM-- diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens1.http b/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens1.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens1.http rename to node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens1.http index 282689032..31ea39594 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens1.http +++ b/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens1.http @@ -1,12 +1,12 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ -Content-Length: 178 - -------TLV0SrKD4z1TRxRhAPUvZ -Content-Disposition: form-data; name="upload"; filename="plain.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ +Content-Length: 178 + +------TLV0SrKD4z1TRxRhAPUvZ +Content-Disposition: form-data; name="upload"; filename="plain.txt" +Content-Type: text/plain + I am a plain text file - -------TLV0SrKD4z1TRxRhAPUvZ + +------TLV0SrKD4z1TRxRhAPUvZ diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens2.http b/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens2.http similarity index 96% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens2.http rename to node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens2.http index 8e1819471..515f519c2 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens2.http +++ b/node_modules/multiparty/test/fixture/http/workarounds/missing-hyphens2.http @@ -1,12 +1,12 @@ -POST /upload HTTP/1.1 -Host: localhost:8080 -Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ -Content-Length: 180 - -------TLV0SrKD4z1TRxRhAPUvZ -Content-Disposition: form-data; name="upload"; filename="plain.txt" -Content-Type: text/plain - +POST /upload HTTP/1.1 +Host: localhost:8080 +Content-Type: multipart/form-data; boundary=----TLV0SrKD4z1TRxRhAPUvZ +Content-Length: 180 + +------TLV0SrKD4z1TRxRhAPUvZ +Content-Disposition: form-data; name="upload"; filename="plain.txt" +Content-Type: text/plain + I am a plain text file - -------TLV0SrKD4z1TRxRhAPUvZ + +------TLV0SrKD4z1TRxRhAPUvZ diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/encoding.js b/node_modules/multiparty/test/fixture/js/encoding.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/encoding.js rename to node_modules/multiparty/test/fixture/js/encoding.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/no-filename.js b/node_modules/multiparty/test/fixture/js/no-filename.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/no-filename.js rename to node_modules/multiparty/test/fixture/js/no-filename.js diff --git 
a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/preamble.js b/node_modules/multiparty/test/fixture/js/preamble.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/preamble.js rename to node_modules/multiparty/test/fixture/js/preamble.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/special-chars-in-filename.js b/node_modules/multiparty/test/fixture/js/special-chars-in-filename.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/special-chars-in-filename.js rename to node_modules/multiparty/test/fixture/js/special-chars-in-filename.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/workarounds.js b/node_modules/multiparty/test/fixture/js/workarounds.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/js/workarounds.js rename to node_modules/multiparty/test/fixture/js/workarounds.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/multi_video.upload b/node_modules/multiparty/test/fixture/multi_video.upload similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/multi_video.upload rename to node_modules/multiparty/test/fixture/multi_video.upload diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/multipart.js b/node_modules/multiparty/test/fixture/multipart.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/fixture/multipart.js rename to node_modules/multiparty/test/fixture/multipart.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/record.js b/node_modules/multiparty/test/record.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/record.js rename to node_modules/multiparty/test/record.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-connection-aborted.js b/node_modules/multiparty/test/standalone/test-connection-aborted.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-connection-aborted.js rename to node_modules/multiparty/test/standalone/test-connection-aborted.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-content-transfer-encoding.js b/node_modules/multiparty/test/standalone/test-content-transfer-encoding.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-content-transfer-encoding.js rename to node_modules/multiparty/test/standalone/test-content-transfer-encoding.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-invalid.js b/node_modules/multiparty/test/standalone/test-invalid.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-invalid.js rename to node_modules/multiparty/test/standalone/test-invalid.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-15.js b/node_modules/multiparty/test/standalone/test-issue-15.js similarity index 100% rename from 
node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-15.js rename to node_modules/multiparty/test/standalone/test-issue-15.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-19.js b/node_modules/multiparty/test/standalone/test-issue-19.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-19.js rename to node_modules/multiparty/test/standalone/test-issue-19.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-21.js b/node_modules/multiparty/test/standalone/test-issue-21.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-21.js rename to node_modules/multiparty/test/standalone/test-issue-21.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-4.js b/node_modules/multiparty/test/standalone/test-issue-4.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-4.js rename to node_modules/multiparty/test/standalone/test-issue-4.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-46.js b/node_modules/multiparty/test/standalone/test-issue-46.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-46.js rename to node_modules/multiparty/test/standalone/test-issue-46.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-5.js b/node_modules/multiparty/test/standalone/test-issue-5.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/standalone/test-issue-5.js rename to node_modules/multiparty/test/standalone/test-issue-5.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/test/test.js b/node_modules/multiparty/test/test.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/test/test.js rename to node_modules/multiparty/test/test.js diff --git a/node_modules/mongoose/node_modules/muri/.npmignore b/node_modules/muri/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/muri/.npmignore rename to node_modules/muri/.npmignore diff --git a/node_modules/mongoose/node_modules/muri/.travis.yml b/node_modules/muri/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/muri/.travis.yml rename to node_modules/muri/.travis.yml diff --git a/node_modules/mongoose/node_modules/muri/History.md b/node_modules/muri/History.md similarity index 100% rename from node_modules/mongoose/node_modules/muri/History.md rename to node_modules/muri/History.md diff --git a/node_modules/mongoose/node_modules/muri/LICENSE b/node_modules/muri/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/muri/LICENSE rename to node_modules/muri/LICENSE diff --git a/node_modules/mongoose/node_modules/muri/Makefile b/node_modules/muri/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/muri/Makefile rename to node_modules/muri/Makefile diff --git a/node_modules/mongoose/node_modules/muri/README.md b/node_modules/muri/README.md similarity index 100% rename from 
node_modules/mongoose/node_modules/muri/README.md rename to node_modules/muri/README.md diff --git a/node_modules/mongoose/node_modules/muri/index.js b/node_modules/muri/index.js similarity index 100% rename from node_modules/mongoose/node_modules/muri/index.js rename to node_modules/muri/index.js diff --git a/node_modules/mongoose/node_modules/muri/lib/index.js b/node_modules/muri/lib/index.js similarity index 100% rename from node_modules/mongoose/node_modules/muri/lib/index.js rename to node_modules/muri/lib/index.js diff --git a/node_modules/muri/package.json b/node_modules/muri/package.json new file mode 100644 index 000000000..f42041fdc --- /dev/null +++ b/node_modules/muri/package.json @@ -0,0 +1,55 @@ +{ + "_from": "muri@0.3.1", + "_id": "muri@0.3.1", + "_inBundle": false, + "_integrity": "sha1-hhiJxchX8aQ3AL7oXVBzH2FyfJo=", + "_location": "/muri", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "muri@0.3.1", + "name": "muri", + "escapedName": "muri", + "rawSpec": "0.3.1", + "saveSpec": null, + "fetchSpec": "0.3.1" + }, + "_requiredBy": [ + "/mongoose" + ], + "_resolved": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz", + "_shasum": "861889c5c857f1a43700bee85d50731f61727c9a", + "_spec": "muri@0.3.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Aaron Heckmann", + "email": "aaron.heckmann+github@gmail.com" + }, + "bugs": { + "url": "https://github.com/aheckmann/muri/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "MongoDB URI parser", + "devDependencies": { + "mocha": "1.6.0" + }, + "homepage": "https://github.com/aheckmann/muri#readme", + "keywords": [ + "mongodb", + "uri", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "muri", + "repository": { + "type": "git", + "url": "git://github.com/aheckmann/muri.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.3.1" +} diff --git a/node_modules/mongoose/node_modules/muri/test/index.js b/node_modules/muri/test/index.js similarity index 100% rename from node_modules/mongoose/node_modules/muri/test/index.js rename to node_modules/muri/test/index.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/LICENSE b/node_modules/negotiator/LICENSE similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/LICENSE rename to node_modules/negotiator/LICENSE diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/examples/accept.js b/node_modules/negotiator/examples/accept.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/examples/accept.js rename to node_modules/negotiator/examples/accept.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/examples/charset.js b/node_modules/negotiator/examples/charset.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/examples/charset.js rename to node_modules/negotiator/examples/charset.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/examples/encoding.js b/node_modules/negotiator/examples/encoding.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/examples/encoding.js rename to node_modules/negotiator/examples/encoding.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/examples/language.js 
b/node_modules/negotiator/examples/language.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/examples/language.js rename to node_modules/negotiator/examples/language.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/lib/charset.js b/node_modules/negotiator/lib/charset.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/lib/charset.js rename to node_modules/negotiator/lib/charset.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/lib/encoding.js b/node_modules/negotiator/lib/encoding.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/lib/encoding.js rename to node_modules/negotiator/lib/encoding.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/lib/language.js b/node_modules/negotiator/lib/language.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/lib/language.js rename to node_modules/negotiator/lib/language.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/lib/mediaType.js b/node_modules/negotiator/lib/mediaType.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/lib/mediaType.js rename to node_modules/negotiator/lib/mediaType.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/lib/negotiator.js b/node_modules/negotiator/lib/negotiator.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/lib/negotiator.js rename to node_modules/negotiator/lib/negotiator.js diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json new file mode 100644 index 000000000..e9eb65fc1 --- /dev/null +++ b/node_modules/negotiator/package.json @@ -0,0 +1,71 @@ +{ + "_from": "negotiator@0.3.0", + "_id": "negotiator@0.3.0", + "_inBundle": false, + "_integrity": "sha1-cG1pLv7d9XTVfqn7GriaT6fuj2A=", + "_location": "/negotiator", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "negotiator@0.3.0", + "name": "negotiator", + "escapedName": "negotiator", + "rawSpec": "0.3.0", + "saveSpec": null, + "fetchSpec": "0.3.0" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz", + "_shasum": "706d692efeddf574d57ea9fb1ab89a4fa7ee8f60", + "_spec": "negotiator@0.3.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "author": { + "name": "Federico Romero", + "email": "federico.romero@outboxlabs.com" + }, + "bugs": { + "url": "https://github.com/federomero/negotiator/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "HTTP content negotiation", + "devDependencies": { + "nodeunit": "0.6.x" + }, + "engine": "node >= 0.6", + "engines": { + "node": "*" + }, + "homepage": "https://github.com/federomero/negotiator#readme", + "keywords": [ + "http", + "content negotiation", + "accept", + "accept-language", + "accept-encoding", + "accept-charset" + ], + "license": "MIT", + "main": "lib/negotiator.js", + "name": "negotiator", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "git://github.com/federomero/negotiator.git" + }, + "scripts": { + "test": "nodeunit test" + }, + "version": "0.3.0" +} diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/readme.md b/node_modules/negotiator/readme.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/readme.md rename to node_modules/negotiator/readme.md diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/test/charset.js b/node_modules/negotiator/test/charset.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/test/charset.js rename to node_modules/negotiator/test/charset.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/test/encoding.js b/node_modules/negotiator/test/encoding.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/test/encoding.js rename to node_modules/negotiator/test/encoding.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/test/language.js b/node_modules/negotiator/test/language.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/test/language.js rename to node_modules/negotiator/test/language.js diff --git a/node_modules/express/node_modules/connect/node_modules/negotiator/test/mediaType.js b/node_modules/negotiator/test/mediaType.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/negotiator/test/mediaType.js rename to node_modules/negotiator/test/mediaType.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/.travis.yml b/node_modules/optimist/.travis.yml similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/.travis.yml rename to node_modules/optimist/.travis.yml diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/LICENSE b/node_modules/optimist/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/LICENSE rename to node_modules/optimist/LICENSE diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/bool.js b/node_modules/optimist/example/bool.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/bool.js rename to node_modules/optimist/example/bool.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/boolean_double.js b/node_modules/optimist/example/boolean_double.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/boolean_double.js rename to node_modules/optimist/example/boolean_double.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/boolean_single.js b/node_modules/optimist/example/boolean_single.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/boolean_single.js rename to node_modules/optimist/example/boolean_single.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/default_hash.js b/node_modules/optimist/example/default_hash.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/default_hash.js rename to node_modules/optimist/example/default_hash.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/default_singles.js b/node_modules/optimist/example/default_singles.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/default_singles.js rename to node_modules/optimist/example/default_singles.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/divide.js b/node_modules/optimist/example/divide.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/divide.js rename to node_modules/optimist/example/divide.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/line_count.js b/node_modules/optimist/example/line_count.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/line_count.js rename to node_modules/optimist/example/line_count.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/line_count_options.js b/node_modules/optimist/example/line_count_options.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/line_count_options.js rename to node_modules/optimist/example/line_count_options.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/line_count_wrap.js b/node_modules/optimist/example/line_count_wrap.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/line_count_wrap.js rename to node_modules/optimist/example/line_count_wrap.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/nonopt.js b/node_modules/optimist/example/nonopt.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/nonopt.js rename to node_modules/optimist/example/nonopt.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/reflect.js b/node_modules/optimist/example/reflect.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/reflect.js rename to node_modules/optimist/example/reflect.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/short.js b/node_modules/optimist/example/short.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/short.js rename to node_modules/optimist/example/short.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/string.js b/node_modules/optimist/example/string.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/string.js rename to node_modules/optimist/example/string.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/usage-options.js b/node_modules/optimist/example/usage-options.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/usage-options.js rename to node_modules/optimist/example/usage-options.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/xup.js b/node_modules/optimist/example/xup.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/example/xup.js rename to node_modules/optimist/example/xup.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/index.js b/node_modules/optimist/index.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/index.js rename to node_modules/optimist/index.js diff --git a/node_modules/optimist/package.json b/node_modules/optimist/package.json new file mode 100644 index 000000000..dd9ce1b80 --- /dev/null +++ b/node_modules/optimist/package.json @@ -0,0 +1,68 @@ +{ + "_from": "optimist@~0.3", + "_id": "optimist@0.3.7", + "_inBundle": false, + "_integrity": "sha1-yQlBrVnkJzMokjB00s8ufLxuwNk=", + "_location": "/optimist", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "optimist@~0.3", + "name": "optimist", + "escapedName": "optimist", + "rawSpec": "~0.3", + "saveSpec": null, + "fetchSpec": "~0.3" + }, + "_requiredBy": [ + "/handlebars", + "/uglify-js" + ], + "_resolved": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "_shasum": "c90941ad59e4273328923074d2cf2e7cbc6ec0d9", + "_spec": "optimist@~0.3", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/handlebars", + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "bugs": { + "url": "https://github.com/substack/node-optimist/issues" + }, + "bundleDependencies": false, + "dependencies": { + "wordwrap": "~0.0.2" + }, + "deprecated": false, + "description": "Light-weight option parsing with an argv hash. 
No optstrings attached.", + "devDependencies": { + "hashish": "~0.0.4", + "tap": "~0.4.0" + }, + "engine": { + "node": ">=0.4" + }, + "homepage": "https://github.com/substack/node-optimist#readme", + "keywords": [ + "argument", + "args", + "option", + "parser", + "parsing", + "cli", + "command" + ], + "license": "MIT/X11", + "main": "./index.js", + "name": "optimist", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/substack/node-optimist.git" + }, + "scripts": { + "test": "tap ./test/*.js" + }, + "version": "0.3.7" +} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/readme.markdown b/node_modules/optimist/readme.markdown similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/readme.markdown rename to node_modules/optimist/readme.markdown diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/_.js b/node_modules/optimist/test/_.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/_.js rename to node_modules/optimist/test/_.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/_/argv.js b/node_modules/optimist/test/_/argv.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/_/argv.js rename to node_modules/optimist/test/_/argv.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/_/bin.js b/node_modules/optimist/test/_/bin.js old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/_/bin.js rename to node_modules/optimist/test/_/bin.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/parse.js b/node_modules/optimist/test/parse.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/parse.js rename to node_modules/optimist/test/parse.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/usage.js b/node_modules/optimist/test/usage.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/test/usage.js rename to node_modules/optimist/test/usage.js diff --git a/node_modules/express/node_modules/send/.npmignore b/node_modules/pause/.npmignore similarity index 100% rename from node_modules/express/node_modules/send/.npmignore rename to node_modules/pause/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/pause/History.md b/node_modules/pause/History.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/pause/History.md rename to node_modules/pause/History.md diff --git a/node_modules/express/node_modules/cookie-signature/Makefile b/node_modules/pause/Makefile similarity index 100% rename from node_modules/express/node_modules/cookie-signature/Makefile rename to node_modules/pause/Makefile diff --git a/node_modules/express/node_modules/connect/node_modules/pause/Readme.md b/node_modules/pause/Readme.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/pause/Readme.md rename to node_modules/pause/Readme.md diff --git a/node_modules/express/node_modules/connect/node_modules/pause/index.js 
b/node_modules/pause/index.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/pause/index.js rename to node_modules/pause/index.js diff --git a/node_modules/pause/package.json b/node_modules/pause/package.json new file mode 100644 index 000000000..2df8fbc79 --- /dev/null +++ b/node_modules/pause/package.json @@ -0,0 +1,41 @@ +{ + "_from": "pause@0.0.1", + "_id": "pause@0.0.1", + "_inBundle": false, + "_integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=", + "_location": "/pause", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "pause@0.0.1", + "name": "pause", + "escapedName": "pause", + "rawSpec": "0.0.1", + "saveSpec": null, + "fetchSpec": "0.0.1" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "_shasum": "1d408b3fdb76923b9543d96fb4c9dfd535d9cb5d", + "_spec": "pause@0.0.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Pause streams...", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [], + "main": "index", + "name": "pause", + "version": "0.0.1" +} diff --git a/node_modules/express/node_modules/connect/node_modules/qs/.gitmodules b/node_modules/qs/.gitmodules similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/qs/.gitmodules rename to node_modules/qs/.gitmodules diff --git a/node_modules/express/node_modules/connect/node_modules/qs/.npmignore b/node_modules/qs/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/qs/.npmignore rename to node_modules/qs/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/qs/Readme.md b/node_modules/qs/Readme.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/qs/Readme.md rename to node_modules/qs/Readme.md diff --git a/node_modules/express/node_modules/connect/node_modules/qs/index.js b/node_modules/qs/index.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/qs/index.js rename to node_modules/qs/index.js diff --git a/node_modules/qs/package.json b/node_modules/qs/package.json new file mode 100644 index 000000000..af61bf77a --- /dev/null +++ b/node_modules/qs/package.json @@ -0,0 +1,59 @@ +{ + "_from": "qs@0.6.6", + "_id": "qs@0.6.6", + "_inBundle": false, + "_integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=", + "_location": "/qs", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "qs@0.6.6", + "name": "qs", + "escapedName": "qs", + "rawSpec": "0.6.6", + "saveSpec": null, + "fetchSpec": "0.6.6" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "_shasum": "6e015098ff51968b8a3c819001d5f2c89bc4b107", + "_spec": "qs@0.6.6", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bugs": { + "url": "https://github.com/visionmedia/node-querystring/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "querystring parser", + "devDependencies": { + "expect.js": "*", + "mocha": "*" + }, + "engines": { + "node": "*" + }, + 
"homepage": "https://github.com/visionmedia/node-querystring#readme", + "keywords": [ + "query string", + "parser", + "component" + ], + "main": "index", + "name": "qs", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/node-querystring.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.6.6" +} diff --git a/node_modules/express/node_modules/range-parser/.npmignore b/node_modules/range-parser/.npmignore similarity index 100% rename from node_modules/express/node_modules/range-parser/.npmignore rename to node_modules/range-parser/.npmignore diff --git a/node_modules/express/node_modules/range-parser/History.md b/node_modules/range-parser/History.md similarity index 100% rename from node_modules/express/node_modules/range-parser/History.md rename to node_modules/range-parser/History.md diff --git a/node_modules/express/node_modules/range-parser/Makefile b/node_modules/range-parser/Makefile similarity index 100% rename from node_modules/express/node_modules/range-parser/Makefile rename to node_modules/range-parser/Makefile diff --git a/node_modules/express/node_modules/range-parser/Readme.md b/node_modules/range-parser/Readme.md similarity index 100% rename from node_modules/express/node_modules/range-parser/Readme.md rename to node_modules/range-parser/Readme.md diff --git a/node_modules/express/node_modules/range-parser/index.js b/node_modules/range-parser/index.js similarity index 100% rename from node_modules/express/node_modules/range-parser/index.js rename to node_modules/range-parser/index.js diff --git a/node_modules/range-parser/package.json b/node_modules/range-parser/package.json new file mode 100644 index 000000000..7a806ce27 --- /dev/null +++ b/node_modules/range-parser/package.json @@ -0,0 +1,45 @@ +{ + "_from": "range-parser@0.0.4", + "_id": "range-parser@0.0.4", + "_inBundle": false, + "_integrity": "sha1-wEJ//vUcEKy6B4KkbJYC50T/Ygs=", + "_location": "/range-parser", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "range-parser@0.0.4", + "name": "range-parser", + "escapedName": "range-parser", + "rawSpec": "0.0.4", + "saveSpec": null, + "fetchSpec": "0.0.4" + }, + "_requiredBy": [ + "/express", + "/send" + ], + "_resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "_shasum": "c0427ffef51c10acba0782a46c9602e744ff620b", + "_spec": "range-parser@0.0.4", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Range header field string parser", + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "engines": { + "node": "*" + }, + "main": "index.js", + "name": "range-parser", + "version": "0.0.4" +} diff --git a/node_modules/express/node_modules/connect/node_modules/raw-body/.npmignore b/node_modules/raw-body/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/raw-body/.npmignore rename to node_modules/raw-body/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/raw-body/.travis.yml b/node_modules/raw-body/.travis.yml similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/raw-body/.travis.yml rename to node_modules/raw-body/.travis.yml diff --git a/node_modules/express/node_modules/connect/node_modules/raw-body/Makefile 
b/node_modules/raw-body/Makefile similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/raw-body/Makefile rename to node_modules/raw-body/Makefile diff --git a/node_modules/express/node_modules/connect/node_modules/raw-body/README.md b/node_modules/raw-body/README.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/raw-body/README.md rename to node_modules/raw-body/README.md diff --git a/node_modules/express/node_modules/connect/node_modules/raw-body/index.js b/node_modules/raw-body/index.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/raw-body/index.js rename to node_modules/raw-body/index.js diff --git a/node_modules/raw-body/package.json b/node_modules/raw-body/package.json new file mode 100644 index 000000000..5099eefcc --- /dev/null +++ b/node_modules/raw-body/package.json @@ -0,0 +1,62 @@ +{ + "_from": "raw-body@1.1.2", + "_id": "raw-body@1.1.2", + "_inBundle": false, + "_integrity": "sha1-x0swBN6l3v0WlhcRBqx0DsMdYr4=", + "_location": "/raw-body", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "raw-body@1.1.2", + "name": "raw-body", + "escapedName": "raw-body", + "rawSpec": "1.1.2", + "saveSpec": null, + "fetchSpec": "1.1.2" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz", + "_shasum": "c74b3004dea5defd1696171106ac740ec31d62be", + "_spec": "raw-body@1.1.2", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "author": { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + }, + "bugs": { + "url": "https://github.com/stream-utils/raw-body/issues" + }, + "bundleDependencies": false, + "dependencies": { + "bytes": "~0.2.1" + }, + "deprecated": false, + "description": "Get and validate the raw body of a readable stream.", + "devDependencies": { + "assert-tap": "~0.1.4", + "co": "2", + "gnode": "~0.0.4", + "mocha": "~1.14.0", + "readable-stream": "~1.0.17", + "request": "~2.27.0", + "through": "~2.3.4" + }, + "engines": { + "node": ">= 0.8.0" + }, + "homepage": "https://github.com/stream-utils/raw-body#readme", + "license": "MIT", + "name": "raw-body", + "repository": { + "type": "git", + "url": "git+https://github.com/stream-utils/raw-body.git" + }, + "scripts": { + "test": "NODE=gnode make test && node ./test/acceptance.js" + }, + "version": "1.1.2" +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/.npmignore b/node_modules/readable-stream/.npmignore similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/.npmignore rename to node_modules/readable-stream/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/LICENSE rename to node_modules/readable-stream/LICENSE diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 000000000..e46b82390 --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,15 @@ +# readable-stream + +***Node-core streams for userland*** + 
+[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png&months=6&height=3)](https://nodei.co/npm/readable-stream/) + +This package is a mirror of the Streams2 and Streams3 implementations in Node-core. + +If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core. + +**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12. + +**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"` + diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/duplex.js b/node_modules/readable-stream/duplex.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/duplex.js rename to node_modules/readable-stream/duplex.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/float.patch b/node_modules/readable-stream/float.patch similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/float.patch rename to node_modules/readable-stream/float.patch diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js similarity index 84% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_duplex.js rename to node_modules/readable-stream/lib/_stream_duplex.js index a2e0d8e0d..b513d61a9 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_duplex.js +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -25,13 +25,27 @@ // Writable. module.exports = Duplex; -var util = require('util'); + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +} +/**/ + + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + var Readable = require('./_stream_readable'); var Writable = require('./_stream_writable'); util.inherits(Duplex, Readable); -Object.keys(Writable.prototype).forEach(function(method) { +forEach(objectKeys(Writable.prototype), function(method) { if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; }); @@ -67,3 +81,9 @@ function onend() { // But allow more writes to happen in this tick. 
process.nextTick(this.end.bind(this)); } + +function forEach (xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js similarity index 93% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_passthrough.js rename to node_modules/readable-stream/lib/_stream_passthrough.js index 330c247d4..895ca50a1 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_passthrough.js +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -26,7 +26,12 @@ module.exports = PassThrough; var Transform = require('./_stream_transform'); -var util = require('util'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + util.inherits(PassThrough, Transform); function PassThrough(options) { diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js similarity index 95% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_readable.js rename to node_modules/readable-stream/lib/_stream_readable.js index 2d00efb43..19ab35889 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_readable.js +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -20,41 +20,51 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + + +/**/ +var Buffer = require('buffer').Buffer; +/**/ + Readable.ReadableState = ReadableState; var EE = require('events').EventEmitter; + +/**/ if (!EE.listenerCount) EE.listenerCount = function(emitter, type) { return emitter.listeners(type).length; }; - -if (!global.setImmediate) global.setImmediate = function setImmediate(fn) { - return setTimeout(fn, 0); -}; -if (!global.clearImmediate) global.clearImmediate = function clearImmediate(i) { - return clearTimeout(i); -}; +/**/ var Stream = require('stream'); -var util = require('util'); -if (!util.isUndefined) { - var utilIs = require('core-util-is'); - for (var f in utilIs) { - util[f] = utilIs[f]; - } -} + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + var StringDecoder; -var debug; -if (util.debuglog) - debug = util.debuglog('stream'); -else try { - debug = require('debuglog')('stream'); -} catch (er) { - debug = function() {}; + + +/**/ +var debug = require('util'); +if (debug && debug.debuglog) { + debug = debug.debuglog('stream'); +} else { + debug = function () {}; } +/**/ + util.inherits(Readable, Stream); function ReadableState(options, stream) { + var Duplex = require('./_stream_duplex'); + options = options || {}; // the point at which it stops calling _read() to fill the buffer @@ -92,6 +102,9 @@ function ReadableState(options, stream) { // make all the buffer merging and length checks go away this.objectMode = !!options.objectMode; + if (stream instanceof Duplex) + this.objectMode = this.objectMode || !!options.readableObjectMode; + // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. 
// Everything else in the universe uses 'utf8', though. @@ -111,13 +124,15 @@ function ReadableState(options, stream) { this.encoding = null; if (options.encoding) { if (!StringDecoder) - StringDecoder = require('string_decoder').StringDecoder; + StringDecoder = require('string_decoder/').StringDecoder; this.decoder = new StringDecoder(options.encoding); this.encoding = options.encoding; } } function Readable(options) { + var Duplex = require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); @@ -219,7 +234,7 @@ function needMoreData(state) { // backwards compatibility. Readable.prototype.setEncoding = function(enc) { if (!StringDecoder) - StringDecoder = require('string_decoder').StringDecoder; + StringDecoder = require('string_decoder/').StringDecoder; this._readableState.decoder = new StringDecoder(enc); this._readableState.encoding = enc; return this; @@ -397,8 +412,7 @@ function chunkInvalid(state, chunk) { if (!util.isBuffer(chunk) && !util.isString(chunk) && !util.isNullOrUndefined(chunk) && - !state.objectMode && - !er) { + !state.objectMode) { er = new TypeError('Invalid non-string/buffer chunk'); } return er; @@ -406,7 +420,7 @@ function chunkInvalid(state, chunk) { function onEofChunk(stream, state) { - if (state.decoder && !state.ended && state.decoder.end) { + if (state.decoder && !state.ended) { var chunk = state.decoder.end(); if (chunk && chunk.length) { state.buffer.push(chunk); @@ -577,7 +591,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) { // is attached before any userland ones. NEVER DO THIS. if (!dest._events || !dest._events.error) dest.on('error', onerror); - else if (Array.isArray(dest._events.error)) + else if (isArray(dest._events.error)) dest._events.error.unshift(onerror); else dest._events.error = [onerror, dest._events.error]; @@ -669,7 +683,7 @@ Readable.prototype.unpipe = function(dest) { } // try to find the right one. - var i = state.pipes.indexOf(dest); + var i = indexOf(state.pipes, dest); if (i === -1) return this; @@ -814,7 +828,7 @@ Readable.prototype.wrap = function(stream) { // proxy certain important events. 
var events = ['error', 'close', 'destroy', 'pause', 'resume']; - events.forEach(function(ev) { + forEach(events, function(ev) { stream.on(ev, self.emit.bind(self, ev)); }); @@ -922,3 +936,16 @@ function endReadable(stream) { }); } } + +function forEach (xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf (xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js similarity index 97% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_transform.js rename to node_modules/readable-stream/lib/_stream_transform.js index b0caf57d8..905c5e450 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_transform.js +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -65,13 +65,12 @@ module.exports = Transform; var Duplex = require('./_stream_duplex'); -var util = require('util'); -if (!util.isUndefined) { - var utilIs = require('core-util-is'); - for (var f in utilIs) { - util[f] = utilIs[f]; - } -} + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + util.inherits(Transform, Duplex); diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js similarity index 94% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_writable.js rename to node_modules/readable-stream/lib/_stream_writable.js index 1dfca70d8..db8539cd5 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/lib/_stream_writable.js +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -24,15 +24,19 @@ // the drain event emission and buffering. module.exports = Writable; + +/**/ +var Buffer = require('buffer').Buffer; +/**/ + Writable.WritableState = WritableState; -var util = require('util'); -if (!util.isUndefined) { - var utilIs = require('core-util-is'); - for (var f in utilIs) { - util[f] = utilIs[f]; - } -} + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + var Stream = require('stream'); util.inherits(Writable, Stream); @@ -44,6 +48,8 @@ function WriteReq(chunk, encoding, cb) { } function WritableState(options, stream) { + var Duplex = require('./_stream_duplex'); + options = options || {}; // the point at which write() starts returning false @@ -57,6 +63,9 @@ function WritableState(options, stream) { // contains buffers or objects. this.objectMode = !!options.objectMode; + if (stream instanceof Duplex) + this.objectMode = this.objectMode || !!options.writableObjectMode; + // cast to ints. 
this.highWaterMark = ~~this.highWaterMark; @@ -121,12 +130,17 @@ function WritableState(options, stream) { // emit prefinish if the only thing we're waiting for is _write cbs // This is relevant for synchronous Transform streams this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; } function Writable(options) { + var Duplex = require('./_stream_duplex'); + // Writable ctor is applied to Duplexes, though they're not // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof require('./_stream_duplex'))) + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); this._writableState = new WritableState(options, this); @@ -242,7 +256,9 @@ function writeOrBuffer(stream, state, chunk, encoding, cb) { state.length += len; var ret = state.length < state.highWaterMark; - state.needDrain = !ret; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) + state.needDrain = true; if (state.writing || state.corked) state.buffer.push(new WriteReq(chunk, encoding, cb)); @@ -275,6 +291,7 @@ function onwriteError(stream, state, sync, er, cb) { cb(er); } + stream._writableState.errorEmitted = true; stream.emit('error', er); } diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 000000000..f1354a9d9 --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,66 @@ +{ + "_from": "readable-stream@~1.1.9", + "_id": "readable-stream@1.1.14", + "_inBundle": false, + "_integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "_location": "/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@~1.1.9", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "~1.1.9", + "saveSpec": null, + "fetchSpec": "~1.1.9" + }, + "_requiredBy": [ + "/multiparty", + "/stream-counter" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "_shasum": "7cf4c54ef648e3813084c636dd2079e166c081d9", + "_spec": "readable-stream@~1.1.9", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/multiparty", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "browser": { + "util": false + }, + "bugs": { + "url": "https://github.com/isaacs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js v0.11.x", + "devDependencies": { + "tap": "~0.2.6" + }, + "homepage": "https://github.com/isaacs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/readable-stream.git" + }, + "scripts": { + "test": "tap test/simple/*.js" + }, + "version": "1.1.14" +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/passthrough.js b/node_modules/readable-stream/passthrough.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/passthrough.js rename to node_modules/readable-stream/passthrough.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js similarity index 76% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/readable.js rename to node_modules/readable-stream/readable.js index 09b8bf509..2a8b5c6b5 100644 --- a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/readable.js +++ b/node_modules/readable-stream/readable.js @@ -5,3 +5,6 @@ exports.Writable = require('./lib/_stream_writable.js'); exports.Duplex = require('./lib/_stream_duplex.js'); exports.Transform = require('./lib/_stream_transform.js'); exports.PassThrough = require('./lib/_stream_passthrough.js'); +if (!process.browser && process.env.READABLE_STREAM === 'disable') { + module.exports = require('stream'); +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/transform.js b/node_modules/readable-stream/transform.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/transform.js rename to node_modules/readable-stream/transform.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/writable.js b/node_modules/readable-stream/writable.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/readable-stream/writable.js rename to node_modules/readable-stream/writable.js diff --git a/node_modules/mongoose/node_modules/regexp-clone/.npmignore b/node_modules/regexp-clone/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/.npmignore rename to node_modules/regexp-clone/.npmignore diff --git a/node_modules/mongoose/node_modules/regexp-clone/.travis.yml b/node_modules/regexp-clone/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/.travis.yml rename to node_modules/regexp-clone/.travis.yml diff --git a/node_modules/mongoose/node_modules/regexp-clone/History.md b/node_modules/regexp-clone/History.md similarity index 100% rename from 
node_modules/mongoose/node_modules/regexp-clone/History.md rename to node_modules/regexp-clone/History.md diff --git a/node_modules/mongoose/node_modules/regexp-clone/LICENSE b/node_modules/regexp-clone/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/LICENSE rename to node_modules/regexp-clone/LICENSE diff --git a/node_modules/mongoose/node_modules/regexp-clone/Makefile b/node_modules/regexp-clone/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/Makefile rename to node_modules/regexp-clone/Makefile diff --git a/node_modules/mongoose/node_modules/regexp-clone/README.md b/node_modules/regexp-clone/README.md similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/README.md rename to node_modules/regexp-clone/README.md diff --git a/node_modules/mongoose/node_modules/regexp-clone/index.js b/node_modules/regexp-clone/index.js similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/index.js rename to node_modules/regexp-clone/index.js diff --git a/node_modules/regexp-clone/package.json b/node_modules/regexp-clone/package.json new file mode 100644 index 000000000..c47de8d9d --- /dev/null +++ b/node_modules/regexp-clone/package.json @@ -0,0 +1,55 @@ +{ + "_from": "regexp-clone@0.0.1", + "_id": "regexp-clone@0.0.1", + "_inBundle": false, + "_integrity": "sha1-p8LgmJH9vzj7sQ03b7cwA+aKxYk=", + "_location": "/regexp-clone", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "regexp-clone@0.0.1", + "name": "regexp-clone", + "escapedName": "regexp-clone", + "rawSpec": "0.0.1", + "saveSpec": null, + "fetchSpec": "0.0.1" + }, + "_requiredBy": [ + "/mongoose", + "/mquery" + ], + "_resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "_shasum": "a7c2e09891fdbf38fbb10d376fb73003e68ac589", + "_spec": "regexp-clone@0.0.1", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Aaron Heckmann", + "email": "aaron.heckmann+github@gmail.com" + }, + "bugs": { + "url": "https://github.com/aheckmann/regexp-clone/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Clone RegExps with options", + "devDependencies": { + "mocha": "1.8.1" + }, + "homepage": "https://github.com/aheckmann/regexp-clone#readme", + "keywords": [ + "RegExp", + "clone" + ], + "license": "MIT", + "main": "index.js", + "name": "regexp-clone", + "repository": { + "type": "git", + "url": "git://github.com/aheckmann/regexp-clone.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.0.1" +} diff --git a/node_modules/mongoose/node_modules/regexp-clone/test/index.js b/node_modules/regexp-clone/test/index.js similarity index 100% rename from node_modules/mongoose/node_modules/regexp-clone/test/index.js rename to node_modules/regexp-clone/test/index.js diff --git a/node_modules/express3-handlebars/node_modules/semver/.npmignore b/node_modules/semver/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/semver/.npmignore rename to node_modules/semver/.npmignore diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/LICENSE b/node_modules/semver/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/LICENSE rename to node_modules/semver/LICENSE diff --git 
a/node_modules/express3-handlebars/node_modules/semver/Makefile b/node_modules/semver/Makefile similarity index 100% rename from node_modules/express3-handlebars/node_modules/semver/Makefile rename to node_modules/semver/Makefile diff --git a/node_modules/express3-handlebars/node_modules/semver/README.md b/node_modules/semver/README.md similarity index 55% rename from node_modules/express3-handlebars/node_modules/semver/README.md rename to node_modules/semver/README.md index ef0465981..4e95b8465 100644 --- a/node_modules/express3-handlebars/node_modules/semver/README.md +++ b/node_modules/semver/README.md @@ -34,7 +34,7 @@ As a command-line utility: ## Versions -A "version" is described by the v2.0.0 specification found at +A "version" is described by the `v2.0.0` specification found at . A leading `"="` or `"v"` character is stripped off and ignored. @@ -43,9 +43,10 @@ A leading `"="` or `"v"` character is stripped off and ignored. The following range styles are supported: -* `1.2.3` A specific version. When nothing else will do. Note that - build metadata is still ignored, so `1.2.3+build2012` will satisfy - this range. +* `1.2.3` A specific version. When nothing else will do. Must be a full + version number, with major, minor, and patch versions specified. + Note that build metadata is still ignored, so `1.2.3+build2012` will + satisfy this range. * `>1.2.3` Greater than a specific version. * `<1.2.3` Less than a specific version. If there is no prerelease tag on the version range, then no prerelease version will be allowed @@ -56,26 +57,33 @@ The following range styles are supported: * `<=1.2.3` Less than or equal to. In this case, prerelease versions ARE allowed, so `1.2.3-beta` would satisfy. * `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` -* `~1.2.3` := `>=1.2.3-0 <1.3.0-0` "Reasonably close to 1.2.3". When +* `~1.2.3` := `>=1.2.3-0 <1.3.0-0` "Reasonably close to `1.2.3`". When using tilde operators, prerelease versions are supported as well, but a prerelease of the next significant digit will NOT be satisfactory, so `1.3.0-beta` will not satisfy `~1.2.3`. -* `^1.2.3` := `>=1.2.3-0 <2.0.0-0` "Compatible with 1.2.3". When +* `^1.2.3` := `>=1.2.3-0 <2.0.0-0` "Compatible with `1.2.3`". When using caret operators, anything from the specified version (including prerelease) will be supported up to, but not including, the next major version (or its prereleases). `1.5.1` will satisfy `^1.2.3`, while `1.2.2` and `2.0.0-beta` will not. -* `^0.1.3` := `>=0.1.3-0 <0.2.0-0` "Compatible with 0.1.3". 0.x.x versions are +* `^0.1.3` := `>=0.1.3-0 <0.2.0-0` "Compatible with `0.1.3`". `0.x.x` versions are special: the first non-zero component indicates potentially breaking changes, meaning the caret operator matches any version with the same first non-zero component starting at the specified version. 
-* `^0.0.2` := `=0.0.2` "Only the version 0.0.2 is considered compatible" -* `~1.2` := `>=1.2.0-0 <1.3.0-0` "Any version starting with 1.2" -* `^1.2` := `>=1.2.0-0 <2.0.0-0` "Any version compatible with 1.2" -* `1.2.x` := `>=1.2.0-0 <1.3.0-0` "Any version starting with 1.2" -* `~1` := `>=1.0.0-0 <2.0.0-0` "Any version starting with 1" -* `^1` := `>=1.0.0-0 <2.0.0-0` "Any version compatible with 1" -* `1.x` := `>=1.0.0-0 <2.0.0-0` "Any version starting with 1" +* `^0.0.2` := `=0.0.2` "Only the version `0.0.2` is considered compatible" +* `~1.2` := `>=1.2.0-0 <1.3.0-0` "Any version starting with `1.2`" +* `^1.2` := `>=1.2.0-0 <2.0.0-0` "Any version compatible with `1.2`" +* `1.2.x` := `>=1.2.0-0 <1.3.0-0` "Any version starting with `1.2`" +* `1.2.*` Same as `1.2.x`. +* `1.2` Same as `1.2.x`. +* `~1` := `>=1.0.0-0 <2.0.0-0` "Any version starting with `1`" +* `^1` := `>=1.0.0-0 <2.0.0-0` "Any version compatible with `1`" +* `1.x` := `>=1.0.0-0 <2.0.0-0` "Any version starting with `1`" +* `1.*` Same as `1.x`. +* `1` Same as `1.x`. +* `*` Any version whatsoever. +* `x` Same as `*`. +* `""` (just an empty string) Same as `*`. Ranges can be joined with either a space (which implies "and") or a @@ -90,42 +98,50 @@ The resulting output will always be 100% strict, of course. Strict-mode Comparators and Ranges will be strict about the SemVer strings that they parse. -* valid(v): Return the parsed version, or null if it's not valid. -* inc(v, release): Return the version incremented by the release type - (major, minor, patch, or prerelease), or null if it's not valid. +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. ### Comparison -* gt(v1, v2): `v1 > v2` -* gte(v1, v2): `v1 >= v2` -* lt(v1, v2): `v1 < v2` -* lte(v1, v2): `v1 <= v2` -* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent, +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, even if they're not the exact same string. You already know how to compare strings. -* neq(v1, v2): `v1 != v2` The opposite of eq. -* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call the corresponding function above. `"==="` and `"!=="` do simple string comparison, but are included for completeness. Throws if an invalid comparison string is provided. -* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if - v2 is greater. Sorts in ascending order if passed to Array.sort(). -* rcompare(v1, v2): The reverse of compare. Sorts an array of versions - in descending order when passed to Array.sort(). +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. 
+* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. ### Ranges -* validRange(range): Return the valid range or null if it's not valid -* satisfies(version, range): Return true if the version satisfies the +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the range. -* maxSatisfying(versions, range): Return the highest version in the list - that satisfies the range, or null if none of them do. -* gtr(version, range): Return true if version is greater than all the +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `gtr(version, range)`: Return `true` if version is greater than all the versions possible in the range. -* ltr(version, range): Return true if version is less than all the +* `ltr(version, range)`: Return `true` if version is less than all the versions possible in the range. -* outside(version, range, hilo): Return true if the version is outside +* `outside(version, range, hilo)`: Return true if the version is outside the bounds of the range in either the high or low direction. The `hilo` argument must be either the string `'>'` or `'<'`. (This is the function called by `gtr` and `ltr`.) @@ -134,8 +150,8 @@ Note that, since ranges may be non-contiguous, a version might not be greater than a range, less than a range, *or* satisfy a range! For example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` until `2.0.0`, so the version `1.2.10` would not be greater than the -range (because 2.0.1 satisfies, which is higher), nor less than the -range (since 1.2.8 satisfies, which is lower), and it also does not +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not satisfy the range. If you want to know if a version satisfies or does not satisfy a diff --git a/node_modules/express3-handlebars/node_modules/semver/bin/semver b/node_modules/semver/bin/semver old mode 100644 new mode 100755 similarity index 94% rename from node_modules/express3-handlebars/node_modules/semver/bin/semver rename to node_modules/semver/bin/semver index 88b4c7d31..41c148fb8 --- a/node_modules/express3-handlebars/node_modules/semver/bin/semver +++ b/node_modules/semver/bin/semver @@ -39,6 +39,7 @@ function main () { case "-i": case "--inc": case "--increment": switch (argv[0]) { case "major": case "minor": case "patch": case "prerelease": + case "premajor": case "preminor": case "prepatch": inc = argv.shift() break default: @@ -106,8 +107,8 @@ function help () { ,"" ,"-i --increment []" ," Increment a version by the specified level. Level can" - ," be one of: major, minor, patch, or prerelease" - ," Default level is 'patch'." + ," be one of: major, minor, patch, premajor, preminor," + ," prepatch, or prerelease. Default level is 'patch'." ," Only one version may be specified." 
,"" ,"-l --loose" diff --git a/node_modules/express3-handlebars/node_modules/semver/foot.js b/node_modules/semver/foot.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/semver/foot.js rename to node_modules/semver/foot.js diff --git a/node_modules/express3-handlebars/node_modules/semver/head.js b/node_modules/semver/head.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/semver/head.js rename to node_modules/semver/head.js diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json new file mode 100644 index 000000000..b07a49ecd --- /dev/null +++ b/node_modules/semver/package.json @@ -0,0 +1,53 @@ +{ + "_from": "semver@2.x", + "_id": "semver@2.3.2", + "_inBundle": false, + "_integrity": "sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI=", + "_location": "/semver", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "semver@2.x", + "name": "semver", + "escapedName": "semver", + "rawSpec": "2.x", + "saveSpec": null, + "fetchSpec": "2.x" + }, + "_requiredBy": [ + "/express3-handlebars" + ], + "_resolved": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", + "_shasum": "b9848f25d6cf36333073ec9ef8856d42f1233e52", + "_spec": "semver@2.x", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express3-handlebars", + "bin": { + "semver": "bin/semver" + }, + "browser": "semver.browser.js", + "bugs": { + "url": "https://github.com/isaacs/node-semver/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The semantic version parser used by npm.", + "devDependencies": { + "tap": "0.x >=0.0.4", + "uglify-js": "~2.3.6" + }, + "homepage": "https://github.com/isaacs/node-semver#readme", + "license": "BSD", + "main": "semver.js", + "min": "semver.min.js", + "name": "semver", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-semver.git" + }, + "scripts": { + "prepublish": "make", + "test": "tap test/*.js" + }, + "version": "2.3.2" +} diff --git a/node_modules/express3-handlebars/node_modules/semver/semver.browser.js b/node_modules/semver/semver.browser.js similarity index 94% rename from node_modules/express3-handlebars/node_modules/semver/semver.browser.js rename to node_modules/semver/semver.browser.js index e05ecc19c..8c89c7151 100644 --- a/node_modules/express3-handlebars/node_modules/semver/semver.browser.js +++ b/node_modules/semver/semver.browser.js @@ -248,6 +248,8 @@ function SemVer(version, loose) { return version; else version = version.version; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version); } if (!(this instanceof SemVer)) @@ -320,7 +322,7 @@ SemVer.prototype.comparePre = function(other) { return -1; else if (!this.prerelease.length && other.prerelease.length) return 1; - else if (!this.prerelease.lenth && !other.prerelease.length) + else if (!this.prerelease.length && !other.prerelease.length) return 0; var i = 0; @@ -341,19 +343,53 @@ SemVer.prototype.comparePre = function(other) { } while (++i); }; +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. 
SemVer.prototype.inc = function(release) { switch (release) { + case 'premajor': + this.inc('major'); + this.inc('pre'); + break; + case 'preminor': + this.inc('minor'); + this.inc('pre'); + break; + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch'); + this.inc('pre'); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) + this.inc('patch'); + this.inc('pre'); + break; case 'major': this.major++; this.minor = -1; case 'minor': this.minor++; - this.patch = -1; + this.patch = 0; + this.prerelease = []; + break; case 'patch': - this.patch++; + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) + this.patch++; this.prerelease = []; break; - case 'prerelease': + // This probably shouldn't be used publically. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': if (this.prerelease.length === 0) this.prerelease = [0]; else { @@ -516,6 +552,9 @@ Comparator.prototype.parse = function(comp) { throw new TypeError('Invalid comparator: ' + comp); this.operator = m[1]; + if (this.operator === '=') + this.operator = ''; + // if it literally is just '>' or '' then allow anything. if (!m[2]) this.semver = ANY; diff --git a/node_modules/express3-handlebars/node_modules/semver/semver.js b/node_modules/semver/semver.js similarity index 94% rename from node_modules/express3-handlebars/node_modules/semver/semver.js rename to node_modules/semver/semver.js index 9e9470d86..75f60f2ae 100644 --- a/node_modules/express3-handlebars/node_modules/semver/semver.js +++ b/node_modules/semver/semver.js @@ -258,6 +258,8 @@ function SemVer(version, loose) { return version; else version = version.version; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version); } if (!(this instanceof SemVer)) @@ -330,7 +332,7 @@ SemVer.prototype.comparePre = function(other) { return -1; else if (!this.prerelease.length && other.prerelease.length) return 1; - else if (!this.prerelease.lenth && !other.prerelease.length) + else if (!this.prerelease.length && !other.prerelease.length) return 0; var i = 0; @@ -351,19 +353,53 @@ SemVer.prototype.comparePre = function(other) { } while (++i); }; +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. SemVer.prototype.inc = function(release) { switch (release) { + case 'premajor': + this.inc('major'); + this.inc('pre'); + break; + case 'preminor': + this.inc('minor'); + this.inc('pre'); + break; + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch'); + this.inc('pre'); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. 
+ case 'prerelease': + if (this.prerelease.length === 0) + this.inc('patch'); + this.inc('pre'); + break; case 'major': this.major++; this.minor = -1; case 'minor': this.minor++; - this.patch = -1; + this.patch = 0; + this.prerelease = []; + break; case 'patch': - this.patch++; + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) + this.patch++; this.prerelease = []; break; - case 'prerelease': + // This probably shouldn't be used publically. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': if (this.prerelease.length === 0) this.prerelease = [0]; else { @@ -526,6 +562,9 @@ Comparator.prototype.parse = function(comp) { throw new TypeError('Invalid comparator: ' + comp); this.operator = m[1]; + if (this.operator === '=') + this.operator = ''; + // if it literally is just '>' or '' then allow anything. if (!m[2]) this.semver = ANY; diff --git a/node_modules/semver/semver.min.js b/node_modules/semver/semver.min.js new file mode 100644 index 000000000..22e320136 --- /dev/null +++ b/node_modules/semver/semver.min.js @@ -0,0 +1 @@ +(function(e){if(typeof module==="object"&&module.exports===e)e=module.exports=H;e.SEMVER_SPEC_VERSION="2.0.0";var r=e.re=[];var t=e.src=[];var n=0;var i=n++;t[i]="0|[1-9]\\d*";var s=n++;t[s]="[0-9]+";var a=n++;t[a]="\\d*[a-zA-Z-][a-zA-Z0-9-]*";var o=n++;t[o]="("+t[i]+")\\."+"("+t[i]+")\\."+"("+t[i]+")";var f=n++;t[f]="("+t[s]+")\\."+"("+t[s]+")\\."+"("+t[s]+")";var u=n++;t[u]="(?:"+t[i]+"|"+t[a]+")";var c=n++;t[c]="(?:"+t[s]+"|"+t[a]+")";var l=n++;t[l]="(?:-("+t[u]+"(?:\\."+t[u]+")*))";var p=n++;t[p]="(?:-?("+t[c]+"(?:\\."+t[c]+")*))";var h=n++;t[h]="[0-9A-Za-z-]+";var v=n++;t[v]="(?:\\+("+t[h]+"(?:\\."+t[h]+")*))";var m=n++;var g="v?"+t[o]+t[l]+"?"+t[v]+"?";t[m]="^"+g+"$";var w="[v=\\s]*"+t[f]+t[p]+"?"+t[v]+"?";var d=n++;t[d]="^"+w+"$";var y=n++;t[y]="((?:<|>)?=?)";var b=n++;t[b]=t[s]+"|x|X|\\*";var $=n++;t[$]=t[i]+"|x|X|\\*";var j=n++;t[j]="[v=\\s]*("+t[$]+")"+"(?:\\.("+t[$]+")"+"(?:\\.("+t[$]+")"+"(?:("+t[l]+")"+")?)?)?";var k=n++;t[k]="[v=\\s]*("+t[b]+")"+"(?:\\.("+t[b]+")"+"(?:\\.("+t[b]+")"+"(?:("+t[p]+")"+")?)?)?";var E=n++;t[E]="^"+t[y]+"\\s*"+t[j]+"$";var x=n++;t[x]="^"+t[y]+"\\s*"+t[k]+"$";var R=n++;t[R]="(?:~>?)";var S=n++;t[S]="(\\s*)"+t[R]+"\\s+";r[S]=new RegExp(t[S],"g");var V="$1~";var I=n++;t[I]="^"+t[R]+t[j]+"$";var T=n++;t[T]="^"+t[R]+t[k]+"$";var A=n++;t[A]="(?:\\^)";var C=n++;t[C]="(\\s*)"+t[A]+"\\s+";r[C]=new RegExp(t[C],"g");var M="$1^";var z=n++;t[z]="^"+t[A]+t[j]+"$";var P=n++;t[P]="^"+t[A]+t[k]+"$";var Z=n++;t[Z]="^"+t[y]+"\\s*("+w+")$|^$";var q=n++;t[q]="^"+t[y]+"\\s*("+g+")$|^$";var L=n++;t[L]="(\\s*)"+t[y]+"\\s*("+w+"|"+t[j]+")";r[L]=new RegExp(t[L],"g");var X="$1$2$3";var _=n++;t[_]="^\\s*("+t[j]+")"+"\\s+-\\s+"+"("+t[j]+")"+"\\s*$";var N=n++;t[N]="^\\s*("+t[k]+")"+"\\s+-\\s+"+"("+t[k]+")"+"\\s*$";var O=n++;t[O]="(<|>)?=?\\s*\\*";for(var B=0;B'};H.prototype.toString=function(){return this.version};H.prototype.compare=function(e){if(!(e instanceof H))e=new H(e,this.loose);return this.compareMain(e)||this.comparePre(e)};H.prototype.compareMain=function(e){if(!(e instanceof H))e=new H(e,this.loose);return Q(this.major,e.major)||Q(this.minor,e.minor)||Q(this.patch,e.patch)};H.prototype.comparePre=function(e){if(!(e instanceof H))e=new H(e,this.loose);if(this.prerelease.length&&!e.prerelease.length)return-1;else 
if(!this.prerelease.length&&e.prerelease.length)return 1;else if(!this.prerelease.length&&!e.prerelease.length)return 0;var r=0;do{var t=this.prerelease[r];var n=e.prerelease[r];if(t===undefined&&n===undefined)return 0;else if(n===undefined)return 1;else if(t===undefined)return-1;else if(t===n)continue;else return Q(t,n)}while(++r)};H.prototype.inc=function(e){switch(e){case"premajor":this.inc("major");this.inc("pre");break;case"preminor":this.inc("minor");this.inc("pre");break;case"prepatch":this.prerelease.length=0;this.inc("patch");this.inc("pre");break;case"prerelease":if(this.prerelease.length===0)this.inc("patch");this.inc("pre");break;case"major":this.major++;this.minor=-1;case"minor":this.minor++;this.patch=0;this.prerelease=[];break;case"patch":if(this.prerelease.length===0)this.patch++;this.prerelease=[];break;case"pre":if(this.prerelease.length===0)this.prerelease=[0];else{var r=this.prerelease.length;while(--r>=0){if(typeof this.prerelease[r]==="number"){this.prerelease[r]++;r=-2}}if(r===-1)this.prerelease.push(0)}break;default:throw new Error("invalid increment argument: "+e)}this.format();return this};e.inc=J;function J(e,r,t){try{return new H(e,t).inc(r).version}catch(n){return null}}e.compareIdentifiers=Q;var K=/^[0-9]+$/;function Q(e,r){var t=K.test(e);var n=K.test(r);if(t&&n){e=+e;r=+r}return t&&!n?-1:n&&!t?1:er?1:0}e.rcompareIdentifiers=U;function U(e,r){return Q(r,e)}e.compare=W;function W(e,r,t){return new H(e,t).compare(r)}e.compareLoose=Y;function Y(e,r){return W(e,r,true)}e.rcompare=er;function er(e,r,t){return W(r,e,t)}e.sort=rr;function rr(r,t){return r.sort(function(r,n){return e.compare(r,n,t)})}e.rsort=tr;function tr(r,t){return r.sort(function(r,n){return e.rcompare(r,n,t)})}e.gt=nr;function nr(e,r,t){return W(e,r,t)>0}e.lt=ir;function ir(e,r,t){return W(e,r,t)<0}e.eq=sr;function sr(e,r,t){return W(e,r,t)===0}e.neq=ar;function ar(e,r,t){return W(e,r,t)!==0}e.gte=or;function or(e,r,t){return W(e,r,t)>=0}e.lte=fr;function fr(e,r,t){return W(e,r,t)<=0}e.cmp=ur;function ur(e,r,t,n){var i;switch(r){case"===":i=e===t;break;case"!==":i=e!==t;break;case"":case"=":case"==":i=sr(e,t,n);break;case"!=":i=ar(e,t,n);break;case">":i=nr(e,t,n);break;case">=":i=or(e,t,n);break;case"<":i=ir(e,t,n);break;case"<=":i=fr(e,t,n);break;default:throw new TypeError("Invalid operator: "+r)}return i}e.Comparator=cr;function cr(e,r){if(e instanceof cr){if(e.loose===r)return e;else e=e.value}if(!(this instanceof cr))return new cr(e,r);this.loose=r;this.parse(e);if(this.semver===lr)this.value="";else this.value=this.operator+this.semver.version}var lr={};cr.prototype.parse=function(e){var t=this.loose?r[Z]:r[q];var n=e.match(t);if(!n)throw new TypeError("Invalid comparator: "+e);this.operator=n[1];if(this.operator==="=")this.operator="";if(!n[2])this.semver=lr;else{this.semver=new H(n[2],this.loose);if(this.operator==="<"&&!this.semver.prerelease.length){this.semver.prerelease=["0"];this.semver.format()}}};cr.prototype.inspect=function(){return''};cr.prototype.toString=function(){return this.value};cr.prototype.test=function(e){return this.semver===lr?true:ur(e,this.operator,this.semver,this.loose)};e.Range=pr;function pr(e,r){if(e instanceof pr&&e.loose===r)return e;if(!(this instanceof pr))return new pr(e,r);this.loose=r;this.raw=e;this.set=e.split(/\s*\|\|\s*/).map(function(e){return this.parseRange(e.trim())},this).filter(function(e){return e.length});if(!this.set.length){throw new TypeError("Invalid SemVer Range: 
"+e)}this.format()}pr.prototype.inspect=function(){return''};pr.prototype.format=function(){this.range=this.set.map(function(e){return e.join(" ").trim()}).join("||").trim();return this.range};pr.prototype.toString=function(){return this.range};pr.prototype.parseRange=function(e){var t=this.loose;e=e.trim();var n=t?r[N]:r[_];e=e.replace(n,kr);e=e.replace(r[L],X);e=e.replace(r[S],V);e=e.replace(r[C],M);e=e.split(/\s+/).join(" ");var i=t?r[Z]:r[q];var s=e.split(" ").map(function(e){return vr(e,t)}).join(" ").split(/\s+/);if(this.loose){s=s.filter(function(e){return!!e.match(i)})}s=s.map(function(e){return new cr(e,t)});return s};e.toComparators=hr;function hr(e,r){return new pr(e,r).set.map(function(e){return e.map(function(e){return e.value}).join(" ").trim().split(" ")})}function vr(e,r){e=dr(e,r);e=gr(e,r);e=br(e,r);e=jr(e,r);return e}function mr(e){return!e||e.toLowerCase()==="x"||e==="*"}function gr(e,r){return e.trim().split(/\s+/).map(function(e){return wr(e,r)}).join(" ")}function wr(e,t){var n=t?r[T]:r[I];return e.replace(n,function(e,r,t,n,i){var s;if(mr(r))s="";else if(mr(t))s=">="+r+".0.0-0 <"+(+r+1)+".0.0-0";else if(mr(n))s=">="+r+"."+t+".0-0 <"+r+"."+(+t+1)+".0-0";else if(i){if(i.charAt(0)!=="-")i="-"+i;s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+"-0"+" <"+r+"."+(+t+1)+".0-0";return s})}function dr(e,r){return e.trim().split(/\s+/).map(function(e){return yr(e,r)}).join(" ")}function yr(e,t){var n=t?r[P]:r[z];return e.replace(n,function(e,r,t,n,i){var s;if(mr(r))s="";else if(mr(t))s=">="+r+".0.0-0 <"+(+r+1)+".0.0-0";else if(mr(n)){if(r==="0")s=">="+r+"."+t+".0-0 <"+r+"."+(+t+1)+".0-0";else s=">="+r+"."+t+".0-0 <"+(+r+1)+".0.0-0"}else if(i){if(i.charAt(0)!=="-")i="-"+i;if(r==="0"){if(t==="0")s="="+r+"."+t+"."+n+i;else s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+i+" <"+(+r+1)+".0.0-0"}else{if(r==="0"){if(t==="0")s="="+r+"."+t+"."+n;else s=">="+r+"."+t+"."+n+"-0"+" <"+r+"."+(+t+1)+".0-0"}else s=">="+r+"."+t+"."+n+"-0"+" <"+(+r+1)+".0.0-0"}return s})}function br(e,r){return e.split(/\s+/).map(function(e){return $r(e,r)}).join(" ")}function $r(e,t){e=e.trim();var n=t?r[x]:r[E];return e.replace(n,function(e,r,t,n,i,s){var a=mr(t);var o=a||mr(n);var f=o||mr(i);var u=f;if(r==="="&&u)r="";if(r&&u){if(a)t=0;if(o)n=0;if(f)i=0;if(r===">"){r=">=";if(a){}else if(o){t=+t+1;n=0;i=0}else if(f){n=+n+1;i=0}}e=r+t+"."+n+"."+i+"-0"}else if(a){e="*"}else if(o){e=">="+t+".0.0-0 <"+(+t+1)+".0.0-0"}else if(f){e=">="+t+"."+n+".0-0 <"+t+"."+(+n+1)+".0-0"}return e})}function jr(e,t){return e.trim().replace(r[O],"")}function kr(e,r,t,n,i,s,a,o,f,u,c,l,p){if(mr(t))r="";else if(mr(n))r=">="+t+".0.0-0";else if(mr(i))r=">="+t+"."+n+".0-0";else r=">="+r;if(mr(f))o="";else if(mr(u))o="<"+(+f+1)+".0.0-0";else if(mr(c))o="<"+f+"."+(+u+1)+".0-0";else if(l)o="<="+f+"."+u+"."+c+"-"+l;else o="<="+o;return(r+" "+o).trim()}pr.prototype.test=function(e){if(!e)return false;for(var r=0;r",t)}e.outside=Tr;function Tr(e,r,t,n){e=new H(e,n);r=new pr(r,n);var i,s,a,o,f;switch(t){case">":i=nr;s=fr;a=ir;o=">";f=">=";break;case"<":i=ir;s=or;a=nr;o="<";f="<=";break;default:throw new TypeError('Must provide a hilo val of "<" or ">"')}if(xr(e,r,n)){return false}for(var u=0;u=0.1.0-0 <0.2.0-0'], ['^1.0', '>=1.0.0-0 <2.0.0-0'], ['^1.2', '>=1.2.0-0 <2.0.0-0'], - ['^0.0.1', '=0.0.1'], - ['^0.0.1-beta', '=0.0.1-beta'], + ['^0.0.1', '0.0.1'], + ['^0.0.1-beta', '0.0.1-beta'], ['^0.1.2', '>=0.1.2-0 <0.2.0-0'], ['^1.2.3', '>=1.2.3-0 <2.0.0-0'], ['^1.2.3-beta.4', '>=1.2.3-beta.4 
<2.0.0-0'], @@ -500,6 +517,21 @@ test('\ncomparators test', function(t) { t.end(); }); +test('\ninvalid version numbers', function(t) { + ['1.2.3.4', + 'NOT VALID', + 1.2, + null, + 'Infinity.NaN.Infinity' + ].forEach(function(v) { + t.throws(function() { + new SemVer(v); + }, {name:'TypeError', message:'Invalid Version: ' + v}); + }); + + t.end(); +}); + test('\nstrict vs loose version numbers', function(t) { [['=1.2.3', '1.2.3'], ['01.02.03', '1.2.3'], diff --git a/node_modules/express3-handlebars/node_modules/semver/test/ltr.js b/node_modules/semver/test/ltr.js similarity index 98% rename from node_modules/express3-handlebars/node_modules/semver/test/ltr.js rename to node_modules/semver/test/ltr.js index a4f503a3c..d1461371b 100644 --- a/node_modules/express3-handlebars/node_modules/semver/test/ltr.js +++ b/node_modules/semver/test/ltr.js @@ -162,7 +162,8 @@ test('\nnegative ltr tests', function(t) { ['^0.1.0 || ~3.0.1 || 5.0.0', '3.2.0'], ['^0.1.0 || ~3.0.1 || 5.0.0', '1.0.0beta', true], ['^0.1.0 || ~3.0.1 || 5.0.0', '5.0.0-0', true], - ['^0.1.0 || ~3.0.1 || >4 <=5.0.0', '3.5.0'] + ['^0.1.0 || ~3.0.1 || >4 <=5.0.0', '3.5.0'], + ['=0.1.0', '1.0.0'] ].forEach(function(tuple) { var range = tuple[0]; var version = tuple[1]; diff --git a/node_modules/express3-handlebars/node_modules/semver/test/no-module.js b/node_modules/semver/test/no-module.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/semver/test/no-module.js rename to node_modules/semver/test/no-module.js diff --git a/node_modules/mongoose/node_modules/mquery/node_modules/debug/.npmignore b/node_modules/send/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/mquery/node_modules/debug/.npmignore rename to node_modules/send/.npmignore diff --git a/node_modules/express/node_modules/send/History.md b/node_modules/send/History.md similarity index 100% rename from node_modules/express/node_modules/send/History.md rename to node_modules/send/History.md diff --git a/node_modules/express/node_modules/send/Makefile b/node_modules/send/Makefile similarity index 100% rename from node_modules/express/node_modules/send/Makefile rename to node_modules/send/Makefile diff --git a/node_modules/express/node_modules/send/Readme.md b/node_modules/send/Readme.md similarity index 100% rename from node_modules/express/node_modules/send/Readme.md rename to node_modules/send/Readme.md diff --git a/node_modules/express/node_modules/send/index.js b/node_modules/send/index.js similarity index 100% rename from node_modules/express/node_modules/send/index.js rename to node_modules/send/index.js diff --git a/node_modules/express/node_modules/send/lib/send.js b/node_modules/send/lib/send.js similarity index 100% rename from node_modules/express/node_modules/send/lib/send.js rename to node_modules/send/lib/send.js diff --git a/node_modules/express/node_modules/send/lib/utils.js b/node_modules/send/lib/utils.js similarity index 100% rename from node_modules/express/node_modules/send/lib/utils.js rename to node_modules/send/lib/utils.js diff --git a/node_modules/send/package.json b/node_modules/send/package.json new file mode 100644 index 000000000..5ac215326 --- /dev/null +++ b/node_modules/send/package.json @@ -0,0 +1,64 @@ +{ + "_from": "send@0.1.4", + "_id": "send@0.1.4", + "_inBundle": false, + "_integrity": "sha1-vnDY0b4B3mGCGvE3gLUDRaT3Gr0=", + "_location": "/send", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "send@0.1.4", + "name": "send", + 
"escapedName": "send", + "rawSpec": "0.1.4", + "saveSpec": null, + "fetchSpec": "0.1.4" + }, + "_requiredBy": [ + "/connect", + "/express" + ], + "_resolved": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", + "_shasum": "be70d8d1be01de61821af13780b50345a4f71abd", + "_spec": "send@0.1.4", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/express", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "bugs": { + "url": "https://github.com/visionmedia/send/issues" + }, + "bundleDependencies": false, + "dependencies": { + "debug": "*", + "fresh": "0.2.0", + "mime": "~1.2.9", + "range-parser": "0.0.4" + }, + "deprecated": false, + "description": "Better streaming static file server with Range and conditional-GET support", + "devDependencies": { + "connect": "2.x", + "mocha": "*", + "should": "*", + "supertest": "0.0.1" + }, + "homepage": "https://github.com/visionmedia/send#readme", + "keywords": [ + "static", + "file", + "server" + ], + "main": "index", + "name": "send", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/send.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.1.4" +} diff --git a/node_modules/sigmund/LICENSE b/node_modules/sigmund/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ b/node_modules/sigmund/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md b/node_modules/sigmund/README.md similarity index 91% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md rename to node_modules/sigmund/README.md index 7e365129e..25a38a53f 100644 --- a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/README.md +++ b/node_modules/sigmund/README.md @@ -11,7 +11,7 @@ string key suitable for caches and the like. function doSomething (someObj) { var key = sigmund(someObj, maxDepth) // max depth defaults to 10 var cached = cache.get(key) - if (cached) return cached) + if (cached) return cached var result = expensiveCalculation(someObj) cache.set(key, result) @@ -26,11 +26,11 @@ For example, the object `{0:'foo'}` will be treated identically to the array `['foo']`. Also, just as there is no way to summon the soul from the scribblings -of a cocain-addled psychoanalyst, there is no way to revive the object +of a cocaine-addled psychoanalyst, there is no way to revive the object from the signature string that sigmund gives you. In fact, it's barely even readable. 
-As with `sys.inspect` and `JSON.stringify`, larger objects will +As with `util.inspect` and `JSON.stringify`, larger objects will produce larger signature strings. Because sigmund is a bit less strict than the more thorough diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/bench.js b/node_modules/sigmund/bench.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/bench.js rename to node_modules/sigmund/bench.js diff --git a/node_modules/sigmund/package.json b/node_modules/sigmund/package.json new file mode 100644 index 000000000..12629d38b --- /dev/null +++ b/node_modules/sigmund/package.json @@ -0,0 +1,63 @@ +{ + "_from": "sigmund@~1.0.0", + "_id": "sigmund@1.0.1", + "_inBundle": false, + "_integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=", + "_location": "/sigmund", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "sigmund@~1.0.0", + "name": "sigmund", + "escapedName": "sigmund", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/minimatch" + ], + "_resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "_shasum": "3ff21f198cad2175f9f3b781853fd94d0d19b590", + "_spec": "sigmund@~1.0.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/minimatch", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/sigmund/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Quick and dirty signatures for Objects.", + "devDependencies": { + "tap": "~0.3.0" + }, + "directories": { + "test": "test" + }, + "homepage": "https://github.com/isaacs/sigmund#readme", + "keywords": [ + "object", + "signature", + "key", + "data", + "psychoanalysis" + ], + "license": "ISC", + "main": "sigmund.js", + "name": "sigmund", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/sigmund.git" + }, + "scripts": { + "bench": "node bench.js", + "test": "tap test/*.js" + }, + "version": "1.0.1" +} diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/sigmund.js b/node_modules/sigmund/sigmund.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/sigmund.js rename to node_modules/sigmund/sigmund.js diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/test/basic.js b/node_modules/sigmund/test/basic.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/sigmund/test/basic.js rename to node_modules/sigmund/test/basic.js diff --git a/node_modules/mongoose/node_modules/sliced/.npmignore b/node_modules/sliced/.npmignore similarity index 100% rename from node_modules/mongoose/node_modules/sliced/.npmignore rename to node_modules/sliced/.npmignore diff --git a/node_modules/mongoose/node_modules/sliced/.travis.yml b/node_modules/sliced/.travis.yml similarity index 100% rename from node_modules/mongoose/node_modules/sliced/.travis.yml rename to node_modules/sliced/.travis.yml diff --git a/node_modules/mongoose/node_modules/sliced/History.md b/node_modules/sliced/History.md similarity index 100% rename from node_modules/mongoose/node_modules/sliced/History.md rename 
to node_modules/sliced/History.md diff --git a/node_modules/mongoose/node_modules/sliced/LICENSE b/node_modules/sliced/LICENSE similarity index 100% rename from node_modules/mongoose/node_modules/sliced/LICENSE rename to node_modules/sliced/LICENSE diff --git a/node_modules/mongoose/node_modules/sliced/Makefile b/node_modules/sliced/Makefile similarity index 100% rename from node_modules/mongoose/node_modules/sliced/Makefile rename to node_modules/sliced/Makefile diff --git a/node_modules/mongoose/node_modules/sliced/README.md b/node_modules/sliced/README.md similarity index 100% rename from node_modules/mongoose/node_modules/sliced/README.md rename to node_modules/sliced/README.md diff --git a/node_modules/mongoose/node_modules/sliced/bench.js b/node_modules/sliced/bench.js similarity index 100% rename from node_modules/mongoose/node_modules/sliced/bench.js rename to node_modules/sliced/bench.js diff --git a/node_modules/mongoose/node_modules/sliced/component.json b/node_modules/sliced/component.json similarity index 100% rename from node_modules/mongoose/node_modules/sliced/component.json rename to node_modules/sliced/component.json diff --git a/node_modules/mongoose/node_modules/sliced/index.js b/node_modules/sliced/index.js similarity index 100% rename from node_modules/mongoose/node_modules/sliced/index.js rename to node_modules/sliced/index.js diff --git a/node_modules/mongoose/node_modules/sliced/lib/sliced.js b/node_modules/sliced/lib/sliced.js similarity index 100% rename from node_modules/mongoose/node_modules/sliced/lib/sliced.js rename to node_modules/sliced/lib/sliced.js diff --git a/node_modules/sliced/package.json b/node_modules/sliced/package.json new file mode 100644 index 000000000..7d0efea9c --- /dev/null +++ b/node_modules/sliced/package.json @@ -0,0 +1,57 @@ +{ + "_from": "sliced@0.0.5", + "_id": "sliced@0.0.5", + "_inBundle": false, + "_integrity": "sha1-XtwETKTrb3gW1Qui/GPiXY/kcH8=", + "_location": "/sliced", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "sliced@0.0.5", + "name": "sliced", + "escapedName": "sliced", + "rawSpec": "0.0.5", + "saveSpec": null, + "fetchSpec": "0.0.5" + }, + "_requiredBy": [ + "/mongoose", + "/mquery" + ], + "_resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "_shasum": "5edc044ca4eb6f7816d50ba2fc63e25d8fe4707f", + "_spec": "sliced@0.0.5", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/mongoose", + "author": { + "name": "Aaron Heckmann", + "email": "aaron.heckmann+github@gmail.com" + }, + "bugs": { + "url": "https://github.com/aheckmann/sliced/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "A faster Node.js alternative to Array.prototype.slice.call(arguments)", + "devDependencies": { + "benchmark": "~1.0.0", + "mocha": "1.5.0" + }, + "homepage": "https://github.com/aheckmann/sliced#readme", + "keywords": [ + "arguments", + "slice", + "array" + ], + "license": "MIT", + "main": "index.js", + "name": "sliced", + "repository": { + "type": "git", + "url": "git://github.com/aheckmann/sliced.git" + }, + "scripts": { + "test": "make test" + }, + "version": "0.0.5" +} diff --git a/node_modules/mongoose/node_modules/sliced/test/index.js b/node_modules/sliced/test/index.js similarity index 100% rename from node_modules/mongoose/node_modules/sliced/test/index.js rename to node_modules/sliced/test/index.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/.npmignore b/node_modules/source-map/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/.npmignore rename to node_modules/source-map/.npmignore diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/.travis.yml b/node_modules/source-map/.travis.yml similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/.travis.yml rename to node_modules/source-map/.travis.yml diff --git a/node_modules/source-map/CHANGELOG.md b/node_modules/source-map/CHANGELOG.md new file mode 100644 index 000000000..2e7ca5d27 --- /dev/null +++ b/node_modules/source-map/CHANGELOG.md @@ -0,0 +1,194 @@ +# Change Log + +## 0.1.43 + +* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue + #148 for some discussion and issues #150, #151, and #152 for implementations. + +## 0.1.42 + +* Fix an issue where `SourceNode`s from different versions of the source-map + library couldn't be used in conjunction with each other. See issue #142. + +## 0.1.41 + +* Fix a bug with getting the source content of relative sources with a "./" + prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768). + +* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the + column span of each mapping. + +* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find + all generated positions associated with a given original source and line. + +## 0.1.40 + +* Performance improvements for parsing source maps in SourceMapConsumer. + +## 0.1.39 + +* Fix a bug where setting a source's contents to null before any source content + had been set before threw a TypeError. See issue #131. + +## 0.1.38 + +* Fix a bug where finding relative paths from an empty path were creating + absolute paths. See issue #129. + +## 0.1.37 + +* Fix a bug where if the source root was an empty string, relative source paths + would turn into absolute source paths. Issue #124. + +## 0.1.36 + +* Allow the `names` mapping property to be an empty string. Issue #121. + +## 0.1.35 + +* A third optional parameter was added to `SourceNode.fromStringWithSourceMap` + to specify a path that relative sources in the second parameter should be + relative to. Issue #105. + +* If no file property is given to a `SourceMapGenerator`, then the resulting + source map will no longer have a `null` file property. The property will + simply not exist. Issue #104. + +* Fixed a bug where consecutive newlines were ignored in `SourceNode`s. + Issue #116. + +## 0.1.34 + +* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103. + +* Fix bug involving source contents and the + `SourceMapGenerator.prototype.applySourceMap`. Issue #100. + +## 0.1.33 + +* Fix some edge cases surrounding path joining and URL resolution. + +* Add a third parameter for relative path to + `SourceMapGenerator.prototype.applySourceMap`. + +* Fix issues with mappings and EOLs. + +## 0.1.32 + +* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns + (issue 92). + +* Fixed test runner to actually report number of failed tests as its process + exit code. + +* Fixed a typo when reporting bad mappings (issue 87). 
+ +## 0.1.31 + +* Delay parsing the mappings in SourceMapConsumer until queried for a source + location. + +* Support Sass source maps (which at the time of writing deviate from the spec + in small ways) in SourceMapConsumer. + +## 0.1.30 + +* Do not join source root with a source, when the source is a data URI. + +* Extend the test runner to allow running single specific test files at a time. + +* Performance improvements in `SourceNode.prototype.walk` and + `SourceMapConsumer.prototype.eachMapping`. + +* Source map browser builds will now work inside Workers. + +* Better error messages when attempting to add an invalid mapping to a + `SourceMapGenerator`. + +## 0.1.29 + +* Allow duplicate entries in the `names` and `sources` arrays of source maps + (usually from TypeScript) we are parsing. Fixes github issue 72. + +## 0.1.28 + +* Skip duplicate mappings when creating source maps from SourceNode; github + issue 75. + +## 0.1.27 + +* Don't throw an error when the `file` property is missing in SourceMapConsumer, + we don't use it anyway. + +## 0.1.26 + +* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70. + +## 0.1.25 + +* Make compatible with browserify + +## 0.1.24 + +* Fix issue with absolute paths and `file://` URIs. See + https://bugzilla.mozilla.org/show_bug.cgi?id=885597 + +## 0.1.23 + +* Fix issue with absolute paths and sourcesContent, github issue 64. + +## 0.1.22 + +* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21. + +## 0.1.21 + +* Fixed handling of sources that start with a slash so that they are relative to + the source root's host. + +## 0.1.20 + +* Fixed github issue #43: absolute URLs aren't joined with the source root + anymore. + +## 0.1.19 + +* Using Travis CI to run tests. + +## 0.1.18 + +* Fixed a bug in the handling of sourceRoot. + +## 0.1.17 + +* Added SourceNode.fromStringWithSourceMap. + +## 0.1.16 + +* Added missing documentation. + +* Fixed the generating of empty mappings in SourceNode. + +## 0.1.15 + +* Added SourceMapGenerator.applySourceMap. + +## 0.1.14 + +* The sourceRoot is now handled consistently. + +## 0.1.13 + +* Added SourceMapGenerator.fromSourceMap. + +## 0.1.12 + +* SourceNode now generates empty mappings too. + +## 0.1.11 + +* Added name support to SourceNode. + +## 0.1.10 + +* Added sourcesContent support to the customer and generator. 
diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/LICENSE b/node_modules/source-map/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/LICENSE rename to node_modules/source-map/LICENSE diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/Makefile.dryice.js b/node_modules/source-map/Makefile.dryice.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/Makefile.dryice.js rename to node_modules/source-map/Makefile.dryice.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/README.md b/node_modules/source-map/README.md similarity index 85% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/README.md rename to node_modules/source-map/README.md index c20437bc6..59767aa39 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/README.md +++ b/node_modules/source-map/README.md @@ -173,7 +173,12 @@ following attributes: * `mappings`: A string of base64 VLQs which contain the actual mappings. -* `file`: The generated filename this source map is associated with. +* `file`: Optional. The generated filename this source map is associated with. + +#### SourceMapConsumer.prototype.computeColumnSpans() + +Compute the last column for each generated mapping. The last column is +inclusive. #### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) @@ -216,6 +221,22 @@ and an object is returned with the following properties: * `column`: The column number in the generated source, or null. +#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition) + +Returns all generated line and column information for the original source +and line provided. The only argument is an object with the following +properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. + +and an array of objects is returned, each with the following properties: + +* `line`: The line number in the generated source, or null. + +* `column`: The column number in the generated source, or null. + #### SourceMapConsumer.prototype.sourceContentFor(source) Returns the original source content for the source provided. The only @@ -244,14 +265,19 @@ generated line/column in this source map. An instance of the SourceMapGenerator represents a source map which is being built incrementally. -#### new SourceMapGenerator(startOfSourceMap) +#### new SourceMapGenerator([startOfSourceMap]) -To create a new one, you must pass an object with the following properties: +You may pass an object with the following properties: * `file`: The filename of the generated source that this source map is associated with. -* `sourceRoot`: An optional root for all relative URLs in this source map. +* `sourceRoot`: A root for all relative URLs in this source map. + +* `skipValidation`: Optional. When `true`, disables validation of mappings as + they are added. This can improve performance but should be used with + discretion, as a last resort. Even then, one should avoid using this flag when + running tests, if possible. 
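A brief sketch of constructing a generator with the options described above (the file names here are illustrative only, not part of this package):

    var SourceMapGenerator = require('source-map').SourceMapGenerator;

    // All three properties are optional; skipValidation trades mapping
    // validation for speed and is best left off when running tests.
    var generator = new SourceMapGenerator({
      file: 'bundle.min.js',
      sourceRoot: 'http://example.com/js',
      skipValidation: false
    });

    generator.addMapping({
      generated: { line: 1, column: 0 },
      original: { line: 1, column: 0 },
      source: 'a.js'
    });
    console.log(generator.toString());  // the serialized source map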
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer) @@ -281,7 +307,7 @@ Set the source content for an original source file. * `sourceContent` the content of the source file. -#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile]) +#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) Applies a SourceMap for a source file to the SourceMap. Each mapping to the supplied source file is rewritten using the @@ -291,7 +317,19 @@ is the minimium of this map and the supplied map. * `sourceMapConsumer`: The SourceMap to be applied. * `sourceFile`: Optional. The filename of the source file. - If omitted, sourceMapConsumer.file will be used. + If omitted, sourceMapConsumer.file will be used, if it exists. + Otherwise an error will be thrown. + +* `sourceMapPath`: Optional. The dirname of the path to the SourceMap + to be applied. If relative, it is relative to the SourceMap. + + This parameter is needed when the two SourceMaps aren't in the same + directory, and the SourceMap to be applied contains relative source + paths. If so, those relative source paths need to be rewritten + relative to the SourceMap. + + If omitted, it is assumed that both SourceMaps are in the same directory, + thus not needing any rewriting. (Supplying `'.'` has the same effect.) #### SourceMapGenerator.prototype.toString() @@ -305,7 +343,7 @@ column information associated between those snippets and the original source code. This is useful as the final intermediate representation a compiler might use before outputting the generated JS and source map. -#### new SourceNode(line, column, source[, chunk[, name]]) +#### new SourceNode([line, column, source[, chunk[, name]]]) * `line`: The original line number associated with this source node, or null if it isn't associated with an original line. @@ -313,14 +351,14 @@ use before outputting the generated JS and source map. * `column`: The original column number associated with this source node, or null if it isn't associated with an original column. -* `source`: The original source's filename. +* `source`: The original source's filename; null if no filename is provided. * `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see below. * `name`: Optional. The original identifier. -#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer) +#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath]) Creates a SourceNode from generated code and a SourceMapConsumer. @@ -328,6 +366,9 @@ Creates a SourceNode from generated code and a SourceMapConsumer. * `sourceMapConsumer` The SourceMap for the generated code +* `relativePath` The optional path that relative sources in `sourceMapConsumer` + should be relative to. + #### SourceNode.prototype.add(chunk) Add a chunk of generated JS to this source node. @@ -387,7 +428,7 @@ for trimming whitespace from the end of a source node, etc. Return the string representation of this source node. Walks over the tree and concatenates all the various snippets together to one string. 
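A short sketch tying the SourceNode methods documented above together (the file names and code snippets are hypothetical):

    var SourceNode = require('source-map').SourceNode;

    // Each node ties a generated chunk to an original line/column/source.
    var node = new SourceNode(1, 0, 'a.js', 'var a = 1;\n');
    node.add(new SourceNode(1, 0, 'b.js', 'var b = 2;\n'));

    // toString() just concatenates the chunks; toStringWithSourceMap()
    // also returns a SourceMapGenerator holding the collected mappings.
    var output = node.toStringWithSourceMap({ file: 'out.js' });
    console.log(output.code);
    console.log(output.map.toString());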
-### SourceNode.prototype.toStringWithSourceMap(startOfSourceMap) +#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap]) Returns the string representation of this tree of source nodes, plus a SourceMapGenerator which contains all the mappings between the generated and diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/assert-shim.js b/node_modules/source-map/build/assert-shim.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/assert-shim.js rename to node_modules/source-map/build/assert-shim.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/mini-require.js b/node_modules/source-map/build/mini-require.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/mini-require.js rename to node_modules/source-map/build/mini-require.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/prefix-source-map.jsm b/node_modules/source-map/build/prefix-source-map.jsm similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/prefix-source-map.jsm rename to node_modules/source-map/build/prefix-source-map.jsm diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/prefix-utils.jsm b/node_modules/source-map/build/prefix-utils.jsm similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/prefix-utils.jsm rename to node_modules/source-map/build/prefix-utils.jsm diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/suffix-browser.js b/node_modules/source-map/build/suffix-browser.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/suffix-browser.js rename to node_modules/source-map/build/suffix-browser.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/suffix-source-map.jsm b/node_modules/source-map/build/suffix-source-map.jsm similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/suffix-source-map.jsm rename to node_modules/source-map/build/suffix-source-map.jsm diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/suffix-utils.jsm b/node_modules/source-map/build/suffix-utils.jsm similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/suffix-utils.jsm rename to node_modules/source-map/build/suffix-utils.jsm diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/test-prefix.js b/node_modules/source-map/build/test-prefix.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/test-prefix.js rename to 
node_modules/source-map/build/test-prefix.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/test-suffix.js b/node_modules/source-map/build/test-suffix.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/build/test-suffix.js rename to node_modules/source-map/build/test-suffix.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map.js b/node_modules/source-map/lib/source-map.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map.js rename to node_modules/source-map/lib/source-map.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/array-set.js b/node_modules/source-map/lib/source-map/array-set.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/array-set.js rename to node_modules/source-map/lib/source-map/array-set.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64-vlq.js b/node_modules/source-map/lib/source-map/base64-vlq.js similarity index 93% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64-vlq.js rename to node_modules/source-map/lib/source-map/base64-vlq.js index 1b67bb375..e22dcaeee 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64-vlq.js +++ b/node_modules/source-map/lib/source-map/base64-vlq.js @@ -66,7 +66,7 @@ define(function (require, exports, module) { /** * Converts from a two-complement value to a value where the sign bit is - * is placed in the least significant bit. For example, as decimals: + * placed in the least significant bit. For example, as decimals: * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) */ @@ -78,7 +78,7 @@ define(function (require, exports, module) { /** * Converts to a two-complement value from a value where the sign bit is - * is placed in the least significant bit. For example, as decimals: + * placed in the least significant bit. For example, as decimals: * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 */ @@ -115,9 +115,9 @@ define(function (require, exports, module) { /** * Decodes the next base 64 VLQ value from the given string and returns the - * value and the rest of the string. + * value and the rest of the string via the out parameter. 
*/ - exports.decode = function base64VLQ_decode(aStr) { + exports.decode = function base64VLQ_decode(aStr, aOutParam) { var i = 0; var strLen = aStr.length; var result = 0; @@ -135,10 +135,8 @@ define(function (require, exports, module) { shift += VLQ_BASE_SHIFT; } while (continuation); - return { - value: fromVLQSigned(result), - rest: aStr.slice(i) - }; + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aStr.slice(i); }; }); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64.js b/node_modules/source-map/lib/source-map/base64.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64.js rename to node_modules/source-map/lib/source-map/base64.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/binary-search.js b/node_modules/source-map/lib/source-map/binary-search.js similarity index 80% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/binary-search.js rename to node_modules/source-map/lib/source-map/binary-search.js index ff347c68b..e085f8100 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/binary-search.js +++ b/node_modules/source-map/lib/source-map/binary-search.js @@ -23,17 +23,17 @@ define(function (require, exports, module) { // // 1. We find the exact element we are looking for. // - // 2. We did not find the exact element, but we can return the next - // closest element that is less than that element. + // 2. We did not find the exact element, but we can return the index of + // the next closest element that is less than that element. // // 3. We did not find the exact element, and there is no next-closest // element which is less than the one we are searching for, so we - // return null. + // return -1. var mid = Math.floor((aHigh - aLow) / 2) + aLow; var cmp = aCompare(aNeedle, aHaystack[mid], true); if (cmp === 0) { // Found the element we are looking for. - return aHaystack[mid]; + return mid; } else if (cmp > 0) { // aHaystack[mid] is greater than our needle. @@ -43,7 +43,7 @@ define(function (require, exports, module) { } // We did not find an exact match, return the next closest one // (termination case 2). - return aHaystack[mid]; + return mid; } else { // aHaystack[mid] is less than our needle. @@ -53,18 +53,16 @@ define(function (require, exports, module) { } // The exact needle element was not found in this haystack. Determine if // we are in termination case (2) or (3) and return the appropriate thing. - return aLow < 0 - ? null - : aHaystack[aLow]; + return aLow < 0 ? -1 : aLow; } } /** * This is an implementation of binary search which will always try and return - * the next lowest value checked if there is no exact hit. This is because - * mappings between original and generated line/col pairs are single points, - * and there is an implicit region between each of them, so a miss just means - * that you aren't on the very start of a region. + * the index of next lowest value checked if there is no exact hit. 
This is + * because mappings between original and generated line/col pairs are single + * points, and there is an implicit region between each of them, so a miss + * just means that you aren't on the very start of a region. * * @param aNeedle The element you are looking for. * @param aHaystack The array that is being searched. @@ -73,9 +71,10 @@ define(function (require, exports, module) { * than, equal to, or greater than the element, respectively. */ exports.search = function search(aNeedle, aHaystack, aCompare) { - return aHaystack.length > 0 - ? recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare) - : null; + if (aHaystack.length === 0) { + return -1; + } + return recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare) }; }); diff --git a/node_modules/source-map/lib/source-map/mapping-list.js b/node_modules/source-map/lib/source-map/mapping-list.js new file mode 100644 index 000000000..2a4eb6186 --- /dev/null +++ b/node_modules/source-map/lib/source-map/mapping-list.js @@ -0,0 +1,86 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ +if (typeof define !== 'function') { + var define = require('amdefine')(module, require); +} +define(function (require, exports, module) { + + var util = require('./util'); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositions(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + var mapping; + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. 
+ */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositions); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + +}); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-consumer.js b/node_modules/source-map/lib/source-map/source-map-consumer.js similarity index 72% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-consumer.js rename to node_modules/source-map/lib/source-map/source-map-consumer.js index a3b9dc086..cfaa299a5 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-consumer.js +++ b/node_modules/source-map/lib/source-map/source-map-consumer.js @@ -29,7 +29,7 @@ define(function (require, exports, module) { * - sourceRoot: Optional. The URL root from which all sources are relative. * - sourcesContent: Optional. An array of contents of the original source files. * - mappings: A string of base64 VLQs which contain the actual mappings. - * - file: The generated file this source map is associated with. + * - file: Optional. The generated file this source map is associated with. * * Here is an example source map, taken from the source map spec[0]: * @@ -66,6 +66,11 @@ define(function (require, exports, module) { throw new Error('Unsupported version: ' + version); } + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + sources = sources.map(util.normalize); + // Pass `true` below to allow duplicate names and sources. While source maps // are intended to be compressed and deduplicated, the TypeScript compiler // sometimes generates source maps with duplicates in them. See Github issue @@ -97,9 +102,8 @@ define(function (require, exports, module) { smc.sourceRoot); smc.file = aSourceMap._file; - smc.__generatedMappings = aSourceMap._mappings.slice() - .sort(util.compareByGeneratedPositions); - smc.__originalMappings = aSourceMap._mappings.slice() + smc.__generatedMappings = aSourceMap._mappings.toArray().slice(); + smc.__originalMappings = aSourceMap._mappings.toArray().slice() .sort(util.compareByOriginalPositions); return smc; @@ -116,7 +120,7 @@ define(function (require, exports, module) { Object.defineProperty(SourceMapConsumer.prototype, 'sources', { get: function () { return this._sources.toArray().map(function (s) { - return this.sourceRoot ? util.join(this.sourceRoot, s) : s; + return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s; }, this); } }); @@ -177,6 +181,12 @@ define(function (require, exports, module) { } }); + SourceMapConsumer.prototype._nextCharIsMappingSeparator = + function SourceMapConsumer_nextCharIsMappingSeparator(aStr) { + var c = aStr.charAt(0); + return c === ";" || c === ","; + }; + /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and @@ -190,10 +200,9 @@ define(function (require, exports, module) { var previousOriginalColumn = 0; var previousSource = 0; var previousName = 0; - var mappingSeparator = /^[,;]/; var str = aStr; + var temp = {}; var mapping; - var temp; while (str.length > 0) { if (str.charAt(0) === ';') { @@ -209,41 +218,41 @@ define(function (require, exports, module) { mapping.generatedLine = generatedLine; // Generated column. - temp = base64VLQ.decode(str); + base64VLQ.decode(str, temp); mapping.generatedColumn = previousGeneratedColumn + temp.value; previousGeneratedColumn = mapping.generatedColumn; str = temp.rest; - if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) { + if (str.length > 0 && !this._nextCharIsMappingSeparator(str)) { // Original source. - temp = base64VLQ.decode(str); + base64VLQ.decode(str, temp); mapping.source = this._sources.at(previousSource + temp.value); previousSource += temp.value; str = temp.rest; - if (str.length === 0 || mappingSeparator.test(str.charAt(0))) { + if (str.length === 0 || this._nextCharIsMappingSeparator(str)) { throw new Error('Found a source, but no line and column'); } // Original line. - temp = base64VLQ.decode(str); + base64VLQ.decode(str, temp); mapping.originalLine = previousOriginalLine + temp.value; previousOriginalLine = mapping.originalLine; // Lines are stored 0-based mapping.originalLine += 1; str = temp.rest; - if (str.length === 0 || mappingSeparator.test(str.charAt(0))) { + if (str.length === 0 || this._nextCharIsMappingSeparator(str)) { throw new Error('Found a source and line, but no column'); } // Original column. - temp = base64VLQ.decode(str); + base64VLQ.decode(str, temp); mapping.originalColumn = previousOriginalColumn + temp.value; previousOriginalColumn = mapping.originalColumn; str = temp.rest; - if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) { + if (str.length > 0 && !this._nextCharIsMappingSeparator(str)) { // Original name. - temp = base64VLQ.decode(str); + base64VLQ.decode(str, temp); mapping.name = this._names.at(previousName + temp.value); previousName += temp.value; str = temp.rest; @@ -257,6 +266,7 @@ define(function (require, exports, module) { } } + this.__generatedMappings.sort(util.compareByGeneratedPositions); this.__originalMappings.sort(util.compareByOriginalPositions); }; @@ -284,6 +294,33 @@ define(function (require, exports, module) { return binarySearch.search(aNeedle, aMappings, aComparator); }; + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + SourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). 
+ if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + /** * Returns the original source, line, and column information for the generated * source's line and column positions provided. The only argument is an object @@ -306,23 +343,27 @@ define(function (require, exports, module) { generatedColumn: util.getArg(aArgs, 'column') }; - var mapping = this._findMapping(needle, - this._generatedMappings, - "generatedLine", - "generatedColumn", - util.compareByGeneratedPositions); + var index = this._findMapping(needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositions); - if (mapping) { - var source = util.getArg(mapping, 'source', null); - if (source && this.sourceRoot) { - source = util.join(this.sourceRoot, source); + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source != null && this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: util.getArg(mapping, 'name', null) + }; } - return { - source: source, - line: util.getArg(mapping, 'originalLine', null), - column: util.getArg(mapping, 'originalColumn', null), - name: util.getArg(mapping, 'name', null) - }; } return { @@ -344,7 +385,7 @@ define(function (require, exports, module) { return null; } - if (this.sourceRoot) { + if (this.sourceRoot != null) { aSource = util.relative(this.sourceRoot, aSource); } @@ -353,7 +394,7 @@ define(function (require, exports, module) { } var url; - if (this.sourceRoot + if (this.sourceRoot != null && (url = util.urlParse(this.sourceRoot))) { // XXX: file:// URIs and absolute paths lead to unexpected behavior for // many users. We can help them out when they expect file:// URIs to @@ -396,29 +437,86 @@ define(function (require, exports, module) { originalColumn: util.getArg(aArgs, 'column') }; - if (this.sourceRoot) { + if (this.sourceRoot != null) { needle.source = util.relative(this.sourceRoot, needle.source); } - var mapping = this._findMapping(needle, - this._originalMappings, - "originalLine", - "originalColumn", - util.compareByOriginalPositions); + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions); + + if (index >= 0) { + var mapping = this._originalMappings[index]; - if (mapping) { return { line: util.getArg(mapping, 'generatedLine', null), - column: util.getArg(mapping, 'generatedColumn', null) + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }; } return { line: null, - column: null + column: null, + lastColumn: null }; }; + /** + * Returns all generated line and column information for the original source + * and line provided. The only argument is an object with the following + * properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. 
+ * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + // When there is no exact match, SourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to Infinity, we thus find the last + // mapping for the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: util.getArg(aArgs, 'line'), + originalColumn: Infinity + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + while (mapping && mapping.originalLine === needle.originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[--index]; + } + } + + return mappings.reverse(); + }; + SourceMapConsumer.GENERATED_ORDER = 1; SourceMapConsumer.ORIGINAL_ORDER = 2; @@ -458,7 +556,7 @@ define(function (require, exports, module) { var sourceRoot = this.sourceRoot; mappings.map(function (mapping) { var source = mapping.source; - if (source && sourceRoot) { + if (source != null && sourceRoot != null) { source = util.join(sourceRoot, source); } return { diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-generator.js b/node_modules/source-map/lib/source-map/source-map-generator.js similarity index 76% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-generator.js rename to node_modules/source-map/lib/source-map/source-map-generator.js index 48ead7ddc..1ab7a47de 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-generator.js +++ b/node_modules/source-map/lib/source-map/source-map-generator.js @@ -12,21 +12,26 @@ define(function (require, exports, module) { var base64VLQ = require('./base64-vlq'); var util = require('./util'); var ArraySet = require('./array-set').ArraySet; + var MappingList = require('./mapping-list').MappingList; /** * An instance of the SourceMapGenerator represents a source map which is - * being built incrementally. To create a new one, you must pass an object - * with the following properties: + * being built incrementally. You may pass an object with the following + * properties: * * - file: The filename of the generated source. - * - sourceRoot: An optional root for all URLs in this source map. + * - sourceRoot: A root for all relative URLs in this source map. 
*/ function SourceMapGenerator(aArgs) { - this._file = util.getArg(aArgs, 'file'); + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); this._sources = new ArraySet(); this._names = new ArraySet(); - this._mappings = []; + this._mappings = new MappingList(); this._sourcesContents = null; } @@ -52,9 +57,9 @@ define(function (require, exports, module) { } }; - if (mapping.source) { + if (mapping.source != null) { newMapping.source = mapping.source; - if (sourceRoot) { + if (sourceRoot != null) { newMapping.source = util.relative(sourceRoot, newMapping.source); } @@ -63,7 +68,7 @@ define(function (require, exports, module) { column: mapping.originalColumn }; - if (mapping.name) { + if (mapping.name != null) { newMapping.name = mapping.name; } } @@ -72,7 +77,7 @@ define(function (require, exports, module) { }); aSourceMapConsumer.sources.forEach(function (sourceFile) { var content = aSourceMapConsumer.sourceContentFor(sourceFile); - if (content) { + if (content != null) { generator.setSourceContent(sourceFile, content); } }); @@ -96,17 +101,19 @@ define(function (require, exports, module) { var source = util.getArg(aArgs, 'source', null); var name = util.getArg(aArgs, 'name', null); - this._validateMapping(generated, original, source, name); + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } - if (source && !this._sources.has(source)) { + if (source != null && !this._sources.has(source)) { this._sources.add(source); } - if (name && !this._names.has(name)) { + if (name != null && !this._names.has(name)) { this._names.add(name); } - this._mappings.push({ + this._mappings.add({ generatedLine: generated.line, generatedColumn: generated.column, originalLine: original != null && original.line, @@ -122,18 +129,18 @@ define(function (require, exports, module) { SourceMapGenerator.prototype.setSourceContent = function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { var source = aSourceFile; - if (this._sourceRoot) { + if (this._sourceRoot != null) { source = util.relative(this._sourceRoot, source); } - if (aSourceContent !== null) { + if (aSourceContent != null) { // Add the source content to the _sourcesContents map. // Create a new _sourcesContents map if the property is null. if (!this._sourcesContents) { this._sourcesContents = {}; } this._sourcesContents[util.toSetString(source)] = aSourceContent; - } else { + } else if (this._sourcesContents) { // Remove the source file from the _sourcesContents map. // If the _sourcesContents map is empty, set the property to null. delete this._sourcesContents[util.toSetString(source)]; @@ -152,55 +159,68 @@ define(function (require, exports, module) { * @param aSourceMapConsumer The source map to be applied. * @param aSourceFile Optional. The filename of the source file. * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. 
*/ SourceMapGenerator.prototype.applySourceMap = - function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile) { + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; // If aSourceFile is omitted, we will use the file property of the SourceMap - if (!aSourceFile) { - aSourceFile = aSourceMapConsumer.file; + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; } var sourceRoot = this._sourceRoot; - // Make "aSourceFile" relative if an absolute Url is passed. - if (sourceRoot) { - aSourceFile = util.relative(sourceRoot, aSourceFile); + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); } // Applying the SourceMap can add and remove items from the sources and // the names array. var newSources = new ArraySet(); var newNames = new ArraySet(); - // Find mappings for the "aSourceFile" - this._mappings.forEach(function (mapping) { - if (mapping.source === aSourceFile && mapping.originalLine) { + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { // Check if it can be mapped by the source map, then update the mapping. var original = aSourceMapConsumer.originalPositionFor({ line: mapping.originalLine, column: mapping.originalColumn }); - if (original.source !== null) { + if (original.source != null) { // Copy mapping - if (sourceRoot) { - mapping.source = util.relative(sourceRoot, original.source); - } else { - mapping.source = original.source; + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); } mapping.originalLine = original.line; mapping.originalColumn = original.column; - if (original.name !== null && mapping.name !== null) { - // Only use the identifier name if it's an identifier - // in both SourceMaps + if (original.name != null) { mapping.name = original.name; } } } var source = mapping.source; - if (source && !newSources.has(source)) { + if (source != null && !newSources.has(source)) { newSources.add(source); } var name = mapping.name; - if (name && !newNames.has(name)) { + if (name != null && !newNames.has(name)) { newNames.add(name); } @@ -211,8 +231,11 @@ define(function (require, exports, module) { // Copy sourcesContents of applied map. 
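
A recurring change in this hunk, and throughout the patch, is replacing truthiness tests with explicit != null comparisons. A short sketch of the motivation, using values that are falsy yet valid in a source map:

var name = '';        // empty-string identifier name (a generator test below covers this)
var column = 0;       // column 0 is a real position
var sourceRoot = '';  // empty sourceRoot (the consumer tests below cover this)

if (name) { /* never taken: '' is falsy, so the value would be dropped */ }
if (name != null) { /* taken: only null and undefined are excluded */ }
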
aSourceMapConsumer.sources.forEach(function (sourceFile) { var content = aSourceMapConsumer.sourceContentFor(sourceFile); - if (content) { - if (sourceRoot) { + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { sourceFile = util.relative(sourceRoot, sourceFile); } this.setSourceContent(sourceFile, content); @@ -252,7 +275,7 @@ define(function (require, exports, module) { throw new Error('Invalid mapping: ' + JSON.stringify({ generated: aGenerated, source: aSource, - orginal: aOriginal, + original: aOriginal, name: aName })); } @@ -273,15 +296,10 @@ define(function (require, exports, module) { var result = ''; var mapping; - // The mappings must be guaranteed to be in sorted order before we start - // serializing them or else the generated line numbers (which are defined - // via the ';' separators) will be all messed up. Note: it might be more - // performant to maintain the sorting as we insert them, rather than as we - // serialize them, but the big O is the same either way. - this._mappings.sort(util.compareByGeneratedPositions); + var mappings = this._mappings.toArray(); - for (var i = 0, len = this._mappings.length; i < len; i++) { - mapping = this._mappings[i]; + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; if (mapping.generatedLine !== previousGeneratedLine) { previousGeneratedColumn = 0; @@ -292,7 +310,7 @@ define(function (require, exports, module) { } else { if (i > 0) { - if (!util.compareByGeneratedPositions(mapping, this._mappings[i - 1])) { + if (!util.compareByGeneratedPositions(mapping, mappings[i - 1])) { continue; } result += ','; @@ -303,7 +321,7 @@ define(function (require, exports, module) { - previousGeneratedColumn); previousGeneratedColumn = mapping.generatedColumn; - if (mapping.source) { + if (mapping.source != null) { result += base64VLQ.encode(this._sources.indexOf(mapping.source) - previousSource); previousSource = this._sources.indexOf(mapping.source); @@ -317,7 +335,7 @@ define(function (require, exports, module) { - previousOriginalColumn); previousOriginalColumn = mapping.originalColumn; - if (mapping.name) { + if (mapping.name != null) { result += base64VLQ.encode(this._names.indexOf(mapping.name) - previousName); previousName = this._names.indexOf(mapping.name); @@ -334,7 +352,7 @@ define(function (require, exports, module) { if (!this._sourcesContents) { return null; } - if (aSourceRoot) { + if (aSourceRoot != null) { source = util.relative(aSourceRoot, source); } var key = util.toSetString(source); @@ -352,12 +370,14 @@ define(function (require, exports, module) { function SourceMapGenerator_toJSON() { var map = { version: this._version, - file: this._file, sources: this._sources.toArray(), names: this._names.toArray(), mappings: this._serializeMappings() }; - if (this._sourceRoot) { + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { map.sourceRoot = this._sourceRoot; } if (this._sourcesContents) { diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-node.js b/node_modules/source-map/lib/source-map/source-node.js similarity index 72% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-node.js rename to node_modules/source-map/lib/source-map/source-node.js index 626cb6574..9ee90bd56 100644 --- 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-node.js +++ b/node_modules/source-map/lib/source-map/source-node.js @@ -12,6 +12,18 @@ define(function (require, exports, module) { var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; var util = require('./util'); + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! + var isSourceNode = "$$$isSourceNode$$$"; + /** * SourceNodes provide a way to abstract over interpolating/concatenating * snippets of generated JavaScript source code while maintaining the line and @@ -27,10 +39,11 @@ define(function (require, exports, module) { function SourceNode(aLine, aColumn, aSource, aChunks, aName) { this.children = []; this.sourceContents = {}; - this.line = aLine === undefined ? null : aLine; - this.column = aColumn === undefined ? null : aColumn; - this.source = aSource === undefined ? null : aSource; - this.name = aName === undefined ? null : aName; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; if (aChunks != null) this.add(aChunks); } @@ -39,16 +52,26 @@ define(function (require, exports, module) { * * @param aGeneratedCode The generated code * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. */ SourceNode.fromStringWithSourceMap = - function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer) { + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { // The SourceNode we want to fill with the generated code // and the SourceMap var node = new SourceNode(); - // The generated code - // Processed fragments are removed from this array. - var remainingLines = aGeneratedCode.split('\n'); + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are removed from this array, by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var shiftNextLine = function() { + var lineContents = remainingLines.shift(); + // The last line of a file might not have a newline. + var newLine = remainingLines.shift() || ""; + return lineContents + newLine; + }; // We need to remember the position of "remainingLines" var lastGeneratedLine = 1, lastGeneratedColumn = 0; @@ -59,41 +82,16 @@ define(function (require, exports, module) { var lastMapping = null; aSourceMapConsumer.eachMapping(function (mapping) { - if (lastMapping === null) { - // We add the generated code until the first mapping - // to the SourceNode without any mapping. - // Each line is added as separate string. 
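
The capturing REGEX_NEWLINE split introduced above keeps each newline in the result, which is what lets shiftNextLine() re-attach a line to its own terminator and lets CRLF input round-trip. A quick illustration of the split behaviour:

'foo\r\nbar\nbaz'.split(/(\r?\n)/);
// => ['foo', '\r\n', 'bar', '\n', 'baz']
// Even indices are line contents, odd indices are the separators; the last
// line simply has no trailing separator, hence the `|| ""` in shiftNextLine().
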
- while (lastGeneratedLine < mapping.generatedLine) { - node.add(remainingLines.shift() + "\n"); - lastGeneratedLine++; - } - if (lastGeneratedColumn < mapping.generatedColumn) { - var nextLine = remainingLines[0]; - node.add(nextLine.substr(0, mapping.generatedColumn)); - remainingLines[0] = nextLine.substr(mapping.generatedColumn); - lastGeneratedColumn = mapping.generatedColumn; - } - } else { + if (lastMapping !== null) { // We add the code from "lastMapping" to "mapping": // First check if there is a new line in between. if (lastGeneratedLine < mapping.generatedLine) { var code = ""; - // Associate full lines with "lastMapping" - do { - code += remainingLines.shift() + "\n"; - lastGeneratedLine++; - lastGeneratedColumn = 0; - } while (lastGeneratedLine < mapping.generatedLine); - // When we reached the correct line, we add code until we - // reach the correct column too. - if (lastGeneratedColumn < mapping.generatedColumn) { - var nextLine = remainingLines[0]; - code += nextLine.substr(0, mapping.generatedColumn); - remainingLines[0] = nextLine.substr(mapping.generatedColumn); - lastGeneratedColumn = mapping.generatedColumn; - } - // Create the SourceNode. - addMappingWithCode(lastMapping, code); + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping } else { // There is no new line in between. // Associate the code between "lastGeneratedColumn" and @@ -105,19 +103,43 @@ define(function (require, exports, module) { lastGeneratedColumn); lastGeneratedColumn = mapping.generatedColumn; addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; } } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[0]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[0] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } lastMapping = mapping; }, this); // We have processed all mappings. - // Associate the remaining code in the current line with "lastMapping" - // and add the remaining lines without any mapping - addMappingWithCode(lastMapping, remainingLines.join("\n")); + if (remainingLines.length > 0) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.join("")); + } // Copy sourcesContent into SourceNode aSourceMapConsumer.sources.forEach(function (sourceFile) { var content = aSourceMapConsumer.sourceContentFor(sourceFile); - if (content) { + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } node.setSourceContent(sourceFile, content); } }); @@ -128,9 +150,12 @@ define(function (require, exports, module) { if (mapping === null || mapping.source === undefined) { node.add(code); } else { + var source = aRelativePath + ? 
util.join(aRelativePath, mapping.source) + : mapping.source; node.add(new SourceNode(mapping.originalLine, mapping.originalColumn, - mapping.source, + source, code, mapping.name)); } @@ -149,7 +174,7 @@ define(function (require, exports, module) { this.add(chunk); }, this); } - else if (aChunk instanceof SourceNode || typeof aChunk === "string") { + else if (aChunk[isSourceNode] || typeof aChunk === "string") { if (aChunk) { this.children.push(aChunk); } @@ -174,7 +199,7 @@ define(function (require, exports, module) { this.prepend(aChunk[i]); } } - else if (aChunk instanceof SourceNode || typeof aChunk === "string") { + else if (aChunk[isSourceNode] || typeof aChunk === "string") { this.children.unshift(aChunk); } else { @@ -196,7 +221,7 @@ define(function (require, exports, module) { var chunk; for (var i = 0, len = this.children.length; i < len; i++) { chunk = this.children[i]; - if (chunk instanceof SourceNode) { + if (chunk[isSourceNode]) { chunk.walk(aFn); } else { @@ -241,7 +266,7 @@ define(function (require, exports, module) { */ SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { var lastChild = this.children[this.children.length - 1]; - if (lastChild instanceof SourceNode) { + if (lastChild[isSourceNode]) { lastChild.replaceRight(aPattern, aReplacement); } else if (typeof lastChild === 'string') { @@ -274,7 +299,7 @@ define(function (require, exports, module) { SourceNode.prototype.walkSourceContents = function SourceNode_walkSourceContents(aFn) { for (var i = 0, len = this.children.length; i < len; i++) { - if (this.children[i] instanceof SourceNode) { + if (this.children[i][isSourceNode]) { this.children[i].walkSourceContents(aFn); } } @@ -350,14 +375,32 @@ define(function (require, exports, module) { lastOriginalSource = null; sourceMappingActive = false; } - chunk.split('').forEach(function (ch) { - if (ch === '\n') { + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { generated.line++; generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } } else { generated.column++; } - }); + } }); this.walkSourceContents(function (sourceFile, sourceContent) { map.setSourceContent(sourceFile, sourceContent); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/util.js b/node_modules/source-map/lib/source-map/util.js similarity index 57% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/util.js rename to node_modules/source-map/lib/source-map/util.js index 87946d3f7..976f6cabb 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/lib/source-map/util.js +++ b/node_modules/source-map/lib/source-map/util.js @@ -30,8 +30,8 @@ define(function (require, exports, module) { } exports.getArg = getArg; - var urlRegexp = /([\w+\-.]+):\/\/((\w+:\w+)@)?([\w.]+)?(:(\d+))?(\S+)?/; - var dataUrlRegexp = /^data:.+\,.+/; + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; function 
urlParse(aUrl) { var match = aUrl.match(urlRegexp); @@ -40,18 +40,22 @@ define(function (require, exports, module) { } return { scheme: match[1], - auth: match[3], - host: match[4], - port: match[6], - path: match[7] + auth: match[2], + host: match[3], + port: match[4], + path: match[5] }; } exports.urlParse = urlParse; function urlGenerate(aParsedUrl) { - var url = aParsedUrl.scheme + "://"; + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; if (aParsedUrl.auth) { - url += aParsedUrl.auth + "@" + url += aParsedUrl.auth + '@'; } if (aParsedUrl.host) { url += aParsedUrl.host; @@ -66,22 +70,146 @@ define(function (require, exports, module) { } exports.urlGenerate = urlGenerate; + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consequtive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = (path.charAt(0) === '/'); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. + */ function join(aRoot, aPath) { - var url; + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } - if (aPath.match(urlRegexp) || aPath.match(dataUrlRegexp)) { + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { return aPath; } - if (aPath.charAt(0) === '/' && (url = urlParse(aRoot))) { - url.path = aPath; - return urlGenerate(url); + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); } - return aRoot.replace(/\/$/, '') + '/' + aPath; + var joined = aPath.charAt(0) === '/' + ? 
aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; } exports.join = join; + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // XXX: It is possible to remove this block, and the tests still pass! + var url = urlParse(aRoot); + if (aPath.charAt(0) == "/" && url && url.path == "/") { + return aPath.slice(1); + } + + return aPath.indexOf(aRoot + '/') === 0 + ? aPath.substr(aRoot.length + 1) + : aPath; + } + exports.relative = relative; + /** * Because behavior goes wacky when you set `__proto__` on objects, we * have to prefix all the strings in our set with an arbitrary character. @@ -101,20 +229,6 @@ define(function (require, exports, module) { } exports.fromSetString = fromSetString; - function relative(aRoot, aPath) { - aRoot = aRoot.replace(/\/$/, ''); - - var url = urlParse(aRoot); - if (aPath.charAt(0) == "/" && url && url.path == "/") { - return aPath.slice(1); - } - - return aPath.indexOf(aRoot + '/') === 0 - ? aPath.substr(aRoot.length + 1) - : aPath; - } - exports.relative = relative; - function strcmp(aStr1, aStr2) { var s1 = aStr1 || ""; var s2 = aStr2 || ""; diff --git a/node_modules/source-map/package.json b/node_modules/source-map/package.json new file mode 100644 index 000000000..81fc58a61 --- /dev/null +++ b/node_modules/source-map/package.json @@ -0,0 +1,183 @@ +{ + "_from": "source-map@~0.1.7", + "_id": "source-map@0.1.43", + "_inBundle": false, + "_integrity": "sha1-wkvBRspRfBRx9drL4lcbK3+eM0Y=", + "_location": "/source-map", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "source-map@~0.1.7", + "name": "source-map", + "escapedName": "source-map", + "rawSpec": "~0.1.7", + "saveSpec": null, + "fetchSpec": "~0.1.7" + }, + "_requiredBy": [ + "/uglify-js" + ], + "_resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", + "_shasum": "c24bc146ca517c1471f5dacbe2571b2b7f9e3346", + "_spec": "source-map@~0.1.7", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/uglify-js", + "author": { + "name": "Nick Fitzgerald", + "email": "nfitzgerald@mozilla.com" + }, + "bugs": { + "url": "https://github.com/mozilla/source-map/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Tobias Koppers", + "email": "tobias.koppers@googlemail.com" + }, + { + "name": "Duncan Beevers", + "email": "duncan@dweebd.com" + }, + { + "name": "Stephen Crane", + "email": "scrane@mozilla.com" + }, + { + "name": "Ryan Seddon", + "email": "seddon.ryan@gmail.com" + }, + { + "name": "Miles Elam", + "email": "miles.elam@deem.com" + }, + { + "name": "Mihai Bazon", + "email": "mihai.bazon@gmail.com" + }, + { + "name": "Michael Ficarra", + "email": "github.public.email@michael.ficarra.me" + }, + { + "name": "Todd Wolfson", + "email": "todd@twolfson.com" + }, + { + "name": "Alexander Solovyov", + "email": "alexander@solovyov.net" + }, + { + "name": "Felix Gnass", + "email": "fgnass@gmail.com" + }, + { + "name": "Conrad Irwin", + "email": "conrad.irwin@gmail.com" + }, + { + "name": "usrbincc", + "email": "usrbincc@yahoo.com" + }, + { + "name": "David Glasser", + "email": "glasser@davidglasser.net" + }, + { + "name": "Chase Douglas", + "email": "chase@newrelic.com" + }, + { 
+ "name": "Evan Wallace", + "email": "evan.exe@gmail.com" + }, + { + "name": "Heather Arthur", + "email": "fayearthur@gmail.com" + }, + { + "name": "Hugh Kennedy", + "email": "hughskennedy@gmail.com" + }, + { + "name": "David Glasser", + "email": "glasser@davidglasser.net" + }, + { + "name": "Simon Lydell", + "email": "simon.lydell@gmail.com" + }, + { + "name": "Jmeas Smith", + "email": "jellyes2@gmail.com" + }, + { + "name": "Michael Z Goddard", + "email": "mzgoddard@gmail.com" + }, + { + "name": "azu", + "email": "azu@users.noreply.github.com" + }, + { + "name": "John Gozde", + "email": "john@gozde.ca" + }, + { + "name": "Adam Kirkton", + "email": "akirkton@truefitinnovation.com" + }, + { + "name": "Chris Montgomery", + "email": "christopher.montgomery@dowjones.com" + }, + { + "name": "J. Ryan Stinnett", + "email": "jryans@gmail.com" + }, + { + "name": "Jack Herrington", + "email": "jherrington@walmartlabs.com" + }, + { + "name": "Chris Truter", + "email": "jeffpalentine@gmail.com" + }, + { + "name": "Daniel Espeset", + "email": "daniel@danielespeset.com" + } + ], + "dependencies": { + "amdefine": ">=0.0.4" + }, + "deprecated": false, + "description": "Generates and consumes source maps", + "devDependencies": { + "dryice": ">=0.4.8" + }, + "directories": { + "lib": "./lib" + }, + "engines": { + "node": ">=0.8.0" + }, + "homepage": "https://github.com/mozilla/source-map", + "licenses": [ + { + "type": "BSD", + "url": "http://opensource.org/licenses/BSD-3-Clause" + } + ], + "main": "./lib/source-map.js", + "name": "source-map", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/mozilla/source-map.git" + }, + "scripts": { + "build": "node Makefile.dryice.js", + "test": "node test/run-tests.js" + }, + "version": "0.1.43" +} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/run-tests.js b/node_modules/source-map/test/run-tests.js old mode 100644 new mode 100755 similarity index 82% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/run-tests.js rename to node_modules/source-map/test/run-tests.js index 626c53f70..64a7c3a3d --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/run-tests.js +++ b/node_modules/source-map/test/run-tests.js @@ -11,7 +11,6 @@ var path = require('path'); var util = require('./source-map/util'); function run(tests) { - var failures = []; var total = 0; var passed = 0; @@ -31,22 +30,13 @@ function run(tests) { } } - console.log(""); + console.log(''); console.log(passed + ' / ' + total + ' tests passed.'); - console.log(""); + console.log(''); - failures.forEach(function (f) { - }); - - return failures.length; + return total - passed; } -var code; - -process.stdout.on('close', function () { - process.exit(code); -}); - function isTestFile(f) { var testToRun = process.argv[2]; return testToRun @@ -62,10 +52,11 @@ var requires = fs.readdirSync(path.join(__dirname, 'source-map')) .filter(isTestFile) .map(toModule); -code = run(requires.map(require).map(function (mod, i) { +var code = run(requires.map(require).map(function (mod, i) { return { name: requires[i], testCase: mod }; })); + process.exit(code); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-api.js b/node_modules/source-map/test/source-map/test-api.js similarity index 100% rename from 
node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-api.js rename to node_modules/source-map/test/source-map/test-api.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-array-set.js b/node_modules/source-map/test/source-map/test-array-set.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-array-set.js rename to node_modules/source-map/test/source-map/test-array-set.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64-vlq.js b/node_modules/source-map/test/source-map/test-base64-vlq.js similarity index 86% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64-vlq.js rename to node_modules/source-map/test/source-map/test-base64-vlq.js index 653a874e9..6fd0d99f4 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64-vlq.js +++ b/node_modules/source-map/test/source-map/test-base64-vlq.js @@ -12,10 +12,9 @@ define(function (require, exports, module) { var base64VLQ = require('../../lib/source-map/base64-vlq'); exports['test normal encoding and decoding'] = function (assert, util) { - var result; + var result = {}; for (var i = -255; i < 256; i++) { - result = base64VLQ.decode(base64VLQ.encode(i)); - assert.ok(result); + base64VLQ.decode(base64VLQ.encode(i), result); assert.equal(result.value, i); assert.equal(result.rest, ""); } diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64.js b/node_modules/source-map/test/source-map/test-base64.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64.js rename to node_modules/source-map/test/source-map/test-base64.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-binary-search.js b/node_modules/source-map/test/source-map/test-binary-search.js similarity index 82% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-binary-search.js rename to node_modules/source-map/test/source-map/test-binary-search.js index ee306830d..f1c9e0fc5 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-binary-search.js +++ b/node_modules/source-map/test/source-map/test-binary-search.js @@ -23,7 +23,7 @@ define(function (require, exports, module) { binarySearch.search(needle, haystack, numberCompare); }); - assert.equal(binarySearch.search(needle, haystack, numberCompare), 20); + assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 20); }; exports['test too low'] = function (assert, util) { @@ -34,21 +34,21 @@ define(function (require, exports, module) { binarySearch.search(needle, haystack, numberCompare); }); - assert.equal(binarySearch.search(needle, haystack, numberCompare), null); + assert.equal(binarySearch.search(needle, haystack, numberCompare), -1); 
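
The updated binary-search tests reflect that the helper now returns an index into the haystack, or -1 when the needle is smaller than every element, instead of returning the element itself. A minimal sketch, assuming the module is required via its internal path (the tests use a relative require to the same file):

// Not part of the package's public API; required directly for illustration.
var binarySearch = require('source-map/lib/source-map/binary-search');

function numberCompare(a, b) { return a - b; }

var haystack = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20];
var idx = binarySearch.search(19, haystack, numberCompare);
var closest = idx === -1 ? null : haystack[idx];  // 18: the closest element <= the needle
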
}; exports['test exact search'] = function (assert, util) { var needle = 4; var haystack = [2,4,6,8,10,12,14,16,18,20]; - assert.equal(binarySearch.search(needle, haystack, numberCompare), 4); + assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 4); }; exports['test fuzzy search'] = function (assert, util) { var needle = 19; var haystack = [2,4,6,8,10,12,14,16,18,20]; - assert.equal(binarySearch.search(needle, haystack, numberCompare), 18); + assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 18); }; }); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-dog-fooding.js b/node_modules/source-map/test/source-map/test-dog-fooding.js similarity index 74% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-dog-fooding.js rename to node_modules/source-map/test/source-map/test-dog-fooding.js index d831b9262..26757b2d1 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-dog-fooding.js +++ b/node_modules/source-map/test/source-map/test-dog-fooding.js @@ -42,6 +42,12 @@ define(function (require, exports, module) { generated: { line: 5, column: 2 } }); + smg.addMapping({ + source: 'gza.coffee', + original: { line: 5, column: 10 }, + generated: { line: 6, column: 12 } + }); + var smc = new SourceMapConsumer(smg.toString()); // Exact @@ -49,24 +55,30 @@ define(function (require, exports, module) { util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert); util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert); util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert); + util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, smc, assert); // Fuzzy - // Original to generated + // Generated to original util.assertMapping(2, 0, null, null, null, null, smc, assert, true); util.assertMapping(2, 9, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true); - util.assertMapping(3, 0, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true); + util.assertMapping(3, 0, null, null, null, null, smc, assert, true); util.assertMapping(3, 9, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true); - util.assertMapping(4, 0, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true); + util.assertMapping(4, 0, null, null, null, null, smc, assert, true); util.assertMapping(4, 9, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true); - util.assertMapping(5, 0, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true); + util.assertMapping(5, 0, null, null, null, null, smc, assert, true); util.assertMapping(5, 9, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true); + util.assertMapping(6, 0, null, null, null, null, smc, assert, true); + util.assertMapping(6, 9, null, null, null, null, smc, assert, true); + util.assertMapping(6, 13, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true); - // Generated to original + // Original to generated util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true); util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true); util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true); util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true); + util.assertMapping(5, 2, '/wu/tang/gza.coffee', 5, 9, null, smc, assert, null, true); + 
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 6, 19, null, smc, assert, null, true); }; }); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-consumer.js b/node_modules/source-map/test/source-map/test-source-map-consumer.js similarity index 65% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-consumer.js rename to node_modules/source-map/test/source-map/test-source-map-consumer.js index f2c65a7f0..c71494348 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-consumer.js +++ b/node_modules/source-map/test/source-map/test-source-map-consumer.js @@ -12,7 +12,7 @@ define(function (require, exports, module) { var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer; var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator; - exports['test that we can instantiate with a string or an objects'] = function (assert, util) { + exports['test that we can instantiate with a string or an object'] = function (assert, util) { assert.doesNotThrow(function () { var map = new SourceMapConsumer(util.testMap); }); @@ -22,18 +22,34 @@ define(function (require, exports, module) { }; exports['test that the `sources` field has the original sources'] = function (assert, util) { - var map = new SourceMapConsumer(util.testMap); - var sources = map.sources; + var map; + var sources; + map = new SourceMapConsumer(util.testMap); + sources = map.sources; assert.equal(sources[0], '/the/root/one.js'); assert.equal(sources[1], '/the/root/two.js'); assert.equal(sources.length, 2); + + map = new SourceMapConsumer(util.testMapNoSourceRoot); + sources = map.sources; + assert.equal(sources[0], 'one.js'); + assert.equal(sources[1], 'two.js'); + assert.equal(sources.length, 2); + + map = new SourceMapConsumer(util.testMapEmptySourceRoot); + sources = map.sources; + assert.equal(sources[0], 'one.js'); + assert.equal(sources[1], 'two.js'); + assert.equal(sources.length, 2); }; exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) { - var map = new SourceMapConsumer(util.testMap); + var map; var mapping; + map = new SourceMapConsumer(util.testMap); + mapping = map.originalPositionFor({ line: 2, column: 1 @@ -45,6 +61,36 @@ define(function (require, exports, module) { column: 1 }); assert.equal(mapping.source, '/the/root/one.js'); + + + map = new SourceMapConsumer(util.testMapNoSourceRoot); + + mapping = map.originalPositionFor({ + line: 2, + column: 1 + }); + assert.equal(mapping.source, 'two.js'); + + mapping = map.originalPositionFor({ + line: 1, + column: 1 + }); + assert.equal(mapping.source, 'one.js'); + + + map = new SourceMapConsumer(util.testMapEmptySourceRoot); + + mapping = map.originalPositionFor({ + line: 2, + column: 1 + }); + assert.equal(mapping.source, 'two.js'); + + mapping = map.originalPositionFor({ + line: 1, + column: 1 + }); + assert.equal(mapping.source, 'one.js'); }; exports['test mapping tokens back exactly'] = function (assert, util) { @@ -80,6 +126,30 @@ define(function (require, exports, module) { util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, map, assert, null, true); }; + exports['test mappings and end of lines'] = function (assert, util) { + var smg = new SourceMapGenerator({ 
+ file: 'foo.js' + }); + smg.addMapping({ + original: { line: 1, column: 1 }, + generated: { line: 1, column: 1 }, + source: 'bar.js' + }); + smg.addMapping({ + original: { line: 2, column: 2 }, + generated: { line: 2, column: 2 }, + source: 'bar.js' + }); + + var map = SourceMapConsumer.fromSourceMap(smg); + + // When finding original positions, mappings end at the end of the line. + util.assertMapping(2, 1, null, null, null, null, map, assert, true) + + // When finding generated positions, mappings do not end at the end of the line. + util.assertMapping(1, 1, 'bar.js', 2, 1, null, map, assert, null, true); + }; + exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) { assert.doesNotThrow(function () { var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap)); @@ -87,15 +157,15 @@ define(function (require, exports, module) { }; exports['test eachMapping'] = function (assert, util) { - var map = new SourceMapConsumer(util.testMap); + var map; + + map = new SourceMapConsumer(util.testMap); var previousLine = -Infinity; var previousColumn = -Infinity; map.eachMapping(function (mapping) { assert.ok(mapping.generatedLine >= previousLine); - if (mapping.source) { - assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0); - } + assert.ok(mapping.source === '/the/root/one.js' || mapping.source === '/the/root/two.js'); if (mapping.generatedLine === previousLine) { assert.ok(mapping.generatedColumn >= previousColumn); @@ -106,6 +176,16 @@ define(function (require, exports, module) { previousColumn = -Infinity; } }); + + map = new SourceMapConsumer(util.testMapNoSourceRoot); + map.eachMapping(function (mapping) { + assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js'); + }); + + map = new SourceMapConsumer(util.testMapEmptySourceRoot); + map.eachMapping(function (mapping) { + assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js'); + }); }; exports['test iterating over mappings in a different order'] = function (assert, util) { @@ -172,6 +252,25 @@ define(function (require, exports, module) { }, Error); }; + exports['test that we can get the original source content with relative source paths'] = function (assert, util) { + var map = new SourceMapConsumer(util.testMapRelativeSources); + var sources = map.sources; + + assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };'); + assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };'); + assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };'); + assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };'); + assert.throws(function () { + map.sourceContentFor(""); + }, Error); + assert.throws(function () { + map.sourceContentFor("/the/root/three.js"); + }, Error); + assert.throws(function () { + map.sourceContentFor("three.js"); + }, Error); + }; + exports['test sourceRoot + generatedPositionFor'] = function (assert, util) { var map = new SourceMapGenerator({ sourceRoot: 'foo/bar', @@ -210,6 +309,158 @@ define(function (require, exports, module) { assert.equal(pos.column, 2); }; + exports['test allGeneratedPositionsFor'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'generated.js' + }); + map.addMapping({ + original: { line: 1, column: 1 }, + generated: { line: 2, column: 2 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 1, column: 1 }, + generated: { line: 
2, column: 2 }, + source: 'bar.coffee' + }); + map.addMapping({ + original: { line: 2, column: 1 }, + generated: { line: 3, column: 2 }, + source: 'bar.coffee' + }); + map.addMapping({ + original: { line: 2, column: 2 }, + generated: { line: 3, column: 3 }, + source: 'bar.coffee' + }); + map.addMapping({ + original: { line: 3, column: 1 }, + generated: { line: 4, column: 2 }, + source: 'bar.coffee' + }); + map = new SourceMapConsumer(map.toString()); + + var mappings = map.allGeneratedPositionsFor({ + line: 2, + source: 'bar.coffee' + }); + + assert.equal(mappings.length, 2); + assert.equal(mappings[0].line, 3); + assert.equal(mappings[0].column, 2); + assert.equal(mappings[1].line, 3); + assert.equal(mappings[1].column, 3); + }; + + exports['test allGeneratedPositionsFor for line with no mappings'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'generated.js' + }); + map.addMapping({ + original: { line: 1, column: 1 }, + generated: { line: 2, column: 2 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 1, column: 1 }, + generated: { line: 2, column: 2 }, + source: 'bar.coffee' + }); + map.addMapping({ + original: { line: 3, column: 1 }, + generated: { line: 4, column: 2 }, + source: 'bar.coffee' + }); + map = new SourceMapConsumer(map.toString()); + + var mappings = map.allGeneratedPositionsFor({ + line: 2, + source: 'bar.coffee' + }); + + assert.equal(mappings.length, 0); + }; + + exports['test allGeneratedPositionsFor source map with no mappings'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'generated.js' + }); + map = new SourceMapConsumer(map.toString()); + + var mappings = map.allGeneratedPositionsFor({ + line: 2, + source: 'bar.coffee' + }); + + assert.equal(mappings.length, 0); + }; + + exports['test computeColumnSpans'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'generated.js' + }); + map.addMapping({ + original: { line: 1, column: 1 }, + generated: { line: 1, column: 1 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 2, column: 1 }, + generated: { line: 2, column: 1 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 2, column: 2 }, + generated: { line: 2, column: 10 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 2, column: 3 }, + generated: { line: 2, column: 20 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 3, column: 1 }, + generated: { line: 3, column: 1 }, + source: 'foo.coffee' + }); + map.addMapping({ + original: { line: 3, column: 2 }, + generated: { line: 3, column: 2 }, + source: 'foo.coffee' + }); + map = new SourceMapConsumer(map.toString()); + + map.computeColumnSpans(); + + var mappings = map.allGeneratedPositionsFor({ + line: 1, + source: 'foo.coffee' + }); + + assert.equal(mappings.length, 1); + assert.equal(mappings[0].lastColumn, Infinity); + + var mappings = map.allGeneratedPositionsFor({ + line: 2, + source: 'foo.coffee' + }); + + assert.equal(mappings.length, 3); + assert.equal(mappings[0].lastColumn, 9); + assert.equal(mappings[1].lastColumn, 19); + assert.equal(mappings[2].lastColumn, Infinity); + + var mappings = map.allGeneratedPositionsFor({ + line: 3, + source: 'foo.coffee' + }); + + assert.equal(mappings.length, 2); + assert.equal(mappings[0].lastColumn, 1); + assert.equal(mappings[1].lastColumn, Infinity); + }; + exports['test sourceRoot + originalPositionFor'] = function (assert, util) { var map = new SourceMapGenerator({ sourceRoot: 'foo/bar', diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-generator.js b/node_modules/source-map/test/source-map/test-source-map-generator.js similarity index 58% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-generator.js rename to node_modules/source-map/test/source-map/test-source-map-generator.js index ba292f548..d748bb185 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-generator.js +++ b/node_modules/source-map/test/source-map/test-source-map-generator.js @@ -20,6 +20,10 @@ define(function (require, exports, module) { sourceRoot: '.' }); assert.ok(true); + + var map = new SourceMapGenerator().toJSON(); + assert.ok(!('file' in map)); + assert.ok(!('sourceRoot' in map)); }; exports['test JSON serialization'] = function (assert, util) { @@ -94,6 +98,27 @@ define(function (require, exports, module) { }); }; + exports['test adding mappings with skipValidation'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'generated-foo.js', + sourceRoot: '.', + skipValidation: true + }); + + // Not enough info, caught by `util.getArgs` + assert.throws(function () { + map.addMapping({}); + }); + + // Original file position, but no source. Not checked. + assert.doesNotThrow(function () { + map.addMapping({ + generated: { line: 1, column: 1 }, + original: { line: 1, column: 1 } + }); + }); + }; + exports['test that the correct mappings are being generated'] = function (assert, util) { var map = new SourceMapGenerator({ file: 'min.js', @@ -177,6 +202,24 @@ define(function (require, exports, module) { util.assertEqualMaps(assert, map, util.testMap); }; + exports['test that adding a mapping with an empty string name does not break generation'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'generated-foo.js', + sourceRoot: '.' 
+ }); + + map.addMapping({ + generated: { line: 1, column: 1 }, + source: 'bar.js', + original: { line: 1, column: 1 }, + name: '' + }); + + assert.doesNotThrow(function () { + JSON.parse(map.toString()); + }); + }; + exports['test that source content can be set'] = function (assert, util) { var map = new SourceMapGenerator({ file: 'min.js', @@ -269,6 +312,218 @@ define(function (require, exports, module) { util.assertEqualMaps(assert, actualMap, expectedMap); }; + exports['test applySourceMap throws when file is missing'] = function (assert, util) { + var map = new SourceMapGenerator({ + file: 'test.js' + }); + var map2 = new SourceMapGenerator(); + assert.throws(function() { + map.applySourceMap(new SourceMapConsumer(map2.toJSON())); + }); + }; + + exports['test the two additional parameters of applySourceMap'] = function (assert, util) { + // Assume the following directory structure: + // + // http://foo.org/ + // bar.coffee + // app/ + // coffee/ + // foo.coffee + // temp/ + // bundle.js + // temp_maps/ + // bundle.js.map + // public/ + // bundle.min.js + // bundle.min.js.map + // + // http://www.example.com/ + // baz.coffee + + var bundleMap = new SourceMapGenerator({ + file: 'bundle.js' + }); + bundleMap.addMapping({ + generated: { line: 3, column: 3 }, + original: { line: 2, column: 2 }, + source: '../../coffee/foo.coffee' + }); + bundleMap.setSourceContent('../../coffee/foo.coffee', 'foo coffee'); + bundleMap.addMapping({ + generated: { line: 13, column: 13 }, + original: { line: 12, column: 12 }, + source: '/bar.coffee' + }); + bundleMap.setSourceContent('/bar.coffee', 'bar coffee'); + bundleMap.addMapping({ + generated: { line: 23, column: 23 }, + original: { line: 22, column: 22 }, + source: 'http://www.example.com/baz.coffee' + }); + bundleMap.setSourceContent( + 'http://www.example.com/baz.coffee', + 'baz coffee' + ); + bundleMap = new SourceMapConsumer(bundleMap.toJSON()); + + var minifiedMap = new SourceMapGenerator({ + file: 'bundle.min.js', + sourceRoot: '..' + }); + minifiedMap.addMapping({ + generated: { line: 1, column: 1 }, + original: { line: 3, column: 3 }, + source: 'temp/bundle.js' + }); + minifiedMap.addMapping({ + generated: { line: 11, column: 11 }, + original: { line: 13, column: 13 }, + source: 'temp/bundle.js' + }); + minifiedMap.addMapping({ + generated: { line: 21, column: 21 }, + original: { line: 23, column: 23 }, + source: 'temp/bundle.js' + }); + minifiedMap = new SourceMapConsumer(minifiedMap.toJSON()); + + var expectedMap = function (sources) { + var map = new SourceMapGenerator({ + file: 'bundle.min.js', + sourceRoot: '..' + }); + map.addMapping({ + generated: { line: 1, column: 1 }, + original: { line: 2, column: 2 }, + source: sources[0] + }); + map.setSourceContent(sources[0], 'foo coffee'); + map.addMapping({ + generated: { line: 11, column: 11 }, + original: { line: 12, column: 12 }, + source: sources[1] + }); + map.setSourceContent(sources[1], 'bar coffee'); + map.addMapping({ + generated: { line: 21, column: 21 }, + original: { line: 22, column: 22 }, + source: sources[2] + }); + map.setSourceContent(sources[2], 'baz coffee'); + return map.toJSON(); + } + + var actualMap = function (aSourceMapPath) { + var map = SourceMapGenerator.fromSourceMap(minifiedMap); + // Note that relying on `bundleMap.file` (which is simply 'bundle.js') + // instead of supplying the second parameter wouldn't work here. 
+ map.applySourceMap(bundleMap, '../temp/bundle.js', aSourceMapPath); + return map.toJSON(); + } + + util.assertEqualMaps(assert, actualMap('../temp/temp_maps'), expectedMap([ + 'coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + + util.assertEqualMaps(assert, actualMap('/app/temp/temp_maps'), expectedMap([ + '/app/coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + + util.assertEqualMaps(assert, actualMap('http://foo.org/app/temp/temp_maps'), expectedMap([ + 'http://foo.org/app/coffee/foo.coffee', + 'http://foo.org/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + + // If the third parameter is omitted or set to the current working + // directory we get incorrect source paths: + + util.assertEqualMaps(assert, actualMap(), expectedMap([ + '../coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + + util.assertEqualMaps(assert, actualMap(''), expectedMap([ + '../coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + + util.assertEqualMaps(assert, actualMap('.'), expectedMap([ + '../coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + + util.assertEqualMaps(assert, actualMap('./'), expectedMap([ + '../coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee' + ])); + }; + + exports['test applySourceMap name handling'] = function (assert, util) { + // Imagine some CoffeeScript code being compiled into JavaScript and then + // minified. + + var assertName = function(coffeeName, jsName, expectedName) { + var minifiedMap = new SourceMapGenerator({ + file: 'test.js.min' + }); + minifiedMap.addMapping({ + generated: { line: 1, column: 4 }, + original: { line: 1, column: 4 }, + source: 'test.js', + name: jsName + }); + + var coffeeMap = new SourceMapGenerator({ + file: 'test.js' + }); + coffeeMap.addMapping({ + generated: { line: 1, column: 4 }, + original: { line: 1, column: 0 }, + source: 'test.coffee', + name: coffeeName + }); + + minifiedMap.applySourceMap(new SourceMapConsumer(coffeeMap.toJSON())); + + new SourceMapConsumer(minifiedMap.toJSON()).eachMapping(function(mapping) { + assert.equal(mapping.name, expectedName); + }); + }; + + // `foo = 1` -> `var foo = 1;` -> `var a=1` + // CoffeeScript doesn’t rename variables, so there’s no need for it to + // provide names in its source maps. Minifiers do rename variables and + // therefore do provide names in their source maps. So that name should be + // retained if the original map lacks names. + assertName(null, 'foo', 'foo'); + + // `foo = 1` -> `var coffee$foo = 1;` -> `var a=1` + // Imagine that CoffeeScript prefixed all variables with `coffee$`. Even + // though the minifier then also provides a name, the original name is + // what corresponds to the source. + assertName('foo', 'coffee$foo', 'foo'); + + // `foo = 1` -> `var coffee$foo = 1;` -> `var coffee$foo=1` + // Minifiers can turn off variable mangling. Then there’s no need to + // provide names in the source map, but the names from the original map are + // still needed. + assertName('foo', null, 'foo'); + + // `foo = 1` -> `var foo = 1;` -> `var foo=1` + // No renaming at all. 
+ assertName(null, null, null); + }; + exports['test sorting with duplicate generated mappings'] = function (assert, util) { var map = new SourceMapGenerator({ file: 'test.js' @@ -414,4 +669,11 @@ define(function (require, exports, module) { }); }; + exports['test setting sourcesContent to null when already null'] = function (assert, util) { + var smg = new SourceMapGenerator({ file: "foo.js" }); + assert.doesNotThrow(function() { + smg.setSourceContent("bar.js", null); + }); + }; + }); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-node.js b/node_modules/source-map/test/source-map/test-source-node.js similarity index 58% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-node.js rename to node_modules/source-map/test/source-map/test-source-node.js index 6e0eca82d..139af4e44 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-node.js +++ b/node_modules/source-map/test/source-map/test-source-node.js @@ -13,6 +13,12 @@ define(function (require, exports, module) { var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer; var SourceNode = require('../../lib/source-map/source-node').SourceNode; + function forEachNewline(fn) { + return function (assert, util) { + ['\n', '\r\n'].forEach(fn.bind(null, assert, util)); + } + } + exports['test .add()'] = function (assert, util) { var node = new SourceNode(null, null, null); @@ -128,20 +134,35 @@ define(function (require, exports, module) { assert.equal(node.toString(), 'hey sexy mama, want to watch Futurama?'); }; - exports['test .toStringWithSourceMap()'] = function (assert, util) { + exports['test .toStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) { var node = new SourceNode(null, null, null, - ['(function () {\n', + ['(function () {' + nl, ' ', new SourceNode(1, 0, 'a.js', 'someCall', 'originalCall'), new SourceNode(1, 8, 'a.js', '()'), - ';\n', - ' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';\n', + ';' + nl, + ' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';' + nl, '}());']); - var map = node.toStringWithSourceMap({ + var result = node.toStringWithSourceMap({ file: 'foo.js' - }).map; + }); + + assert.equal(result.code, [ + '(function () {', + ' someCall();', + ' if (foo) bar();', + '}());' + ].join(nl)); + + var map = result.map; + var mapWithoutOptions = node.toStringWithSourceMap().map; assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator'); + assert.ok(mapWithoutOptions instanceof SourceMapGenerator, 'mapWithoutOptions instanceof SourceMapGenerator'); + assert.ok(!('file' in mapWithoutOptions)); + mapWithoutOptions._file = 'foo.js'; + util.assertEqualMaps(assert, map.toJSON(), mapWithoutOptions.toJSON()); + map = new SourceMapConsumer(map.toString()); var actual; @@ -186,11 +207,12 @@ define(function (require, exports, module) { assert.equal(actual.source, null); assert.equal(actual.line, null); assert.equal(actual.column, null); - }; + }); - exports['test .fromStringWithSourceMap()'] = function (assert, util) { + exports['test .fromStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) { + var testCode = util.testGeneratedCode.replace(/\n/g, nl); var node = SourceNode.fromStringWithSourceMap( - util.testGeneratedCode, + testCode, new 
SourceMapConsumer(util.testMap)); var result = node.toStringWithSourceMap({ @@ -199,17 +221,17 @@ define(function (require, exports, module) { var map = result.map; var code = result.code; - assert.equal(code, util.testGeneratedCode); + assert.equal(code, testCode); assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator'); map = map.toJSON(); assert.equal(map.version, util.testMap.version); assert.equal(map.file, util.testMap.file); assert.equal(map.mappings, util.testMap.mappings); - }; + }); - exports['test .fromStringWithSourceMap() empty map'] = function (assert, util) { + exports['test .fromStringWithSourceMap() empty map'] = forEachNewline(function (assert, util, nl) { var node = SourceNode.fromStringWithSourceMap( - util.testGeneratedCode, + util.testGeneratedCode.replace(/\n/g, nl), new SourceMapConsumer(util.emptyMap)); var result = node.toStringWithSourceMap({ file: 'min.js' @@ -217,22 +239,22 @@ define(function (require, exports, module) { var map = result.map; var code = result.code; - assert.equal(code, util.testGeneratedCode); + assert.equal(code, util.testGeneratedCode.replace(/\n/g, nl)); assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator'); map = map.toJSON(); assert.equal(map.version, util.emptyMap.version); assert.equal(map.file, util.emptyMap.file); assert.equal(map.mappings.length, util.emptyMap.mappings.length); assert.equal(map.mappings, util.emptyMap.mappings); - }; + }); - exports['test .fromStringWithSourceMap() complex version'] = function (assert, util) { + exports['test .fromStringWithSourceMap() complex version'] = forEachNewline(function (assert, util, nl) { var input = new SourceNode(null, null, null, [ - "(function() {\n", - " var Test = {};\n", - " ", new SourceNode(1, 0, "a.js", "Test.A = { value: 1234 };\n"), - " ", new SourceNode(2, 0, "a.js", "Test.A.x = 'xyz';"), "\n", - "}());\n", + "(function() {" + nl, + " var Test = {};" + nl, + " ", new SourceNode(1, 0, "a.js", "Test.A = { value: 1234 };" + nl), + " ", new SourceNode(2, 0, "a.js", "Test.A.x = 'xyz';"), nl, + "}());" + nl, "/* Generated Source */"]); input = input.toStringWithSourceMap({ file: 'foo.js' @@ -253,25 +275,123 @@ define(function (require, exports, module) { map = map.toJSON(); var inputMap = input.map.toJSON(); util.assertEqualMaps(assert, map, inputMap); + }); + + exports['test .fromStringWithSourceMap() third argument'] = function (assert, util) { + // Assume the following directory structure: + // + // http://foo.org/ + // bar.coffee + // app/ + // coffee/ + // foo.coffee + // coffeeBundle.js # Made from {foo,bar,baz}.coffee + // maps/ + // coffeeBundle.js.map + // js/ + // foo.js + // public/ + // app.js # Made from {foo,coffeeBundle}.js + // app.js.map + // + // http://www.example.com/ + // baz.coffee + + var coffeeBundle = new SourceNode(1, 0, 'foo.coffee', 'foo(coffee);\n'); + coffeeBundle.setSourceContent('foo.coffee', 'foo coffee'); + coffeeBundle.add(new SourceNode(2, 0, '/bar.coffee', 'bar(coffee);\n')); + coffeeBundle.add(new SourceNode(3, 0, 'http://www.example.com/baz.coffee', 'baz(coffee);')); + coffeeBundle = coffeeBundle.toStringWithSourceMap({ + file: 'foo.js', + sourceRoot: '..' 
+ }); + + var foo = new SourceNode(1, 0, 'foo.js', 'foo(js);'); + + var test = function(relativePath, expectedSources) { + var app = new SourceNode(); + app.add(SourceNode.fromStringWithSourceMap( + coffeeBundle.code, + new SourceMapConsumer(coffeeBundle.map.toString()), + relativePath)); + app.add(foo); + var i = 0; + app.walk(function (chunk, loc) { + assert.equal(loc.source, expectedSources[i]); + i++; + }); + app.walkSourceContents(function (sourceFile, sourceContent) { + assert.equal(sourceFile, expectedSources[0]); + assert.equal(sourceContent, 'foo coffee'); + }) + }; + + test('../coffee/maps', [ + '../coffee/foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee', + 'foo.js' + ]); + + // If the third parameter is omitted or set to the current working + // directory we get incorrect source paths: + + test(undefined, [ + '../foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee', + 'foo.js' + ]); + + test('', [ + '../foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee', + 'foo.js' + ]); + + test('.', [ + '../foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee', + 'foo.js' + ]); + + test('./', [ + '../foo.coffee', + '/bar.coffee', + 'http://www.example.com/baz.coffee', + 'foo.js' + ]); }; - exports['test .fromStringWithSourceMap() merging duplicate mappings'] = function (assert, util) { + exports['test .toStringWithSourceMap() merging duplicate mappings'] = forEachNewline(function (assert, util, nl) { var input = new SourceNode(null, null, null, [ new SourceNode(1, 0, "a.js", "(function"), - new SourceNode(1, 0, "a.js", "() {\n"), + new SourceNode(1, 0, "a.js", "() {" + nl), " ", new SourceNode(1, 0, "a.js", "var Test = "), - new SourceNode(1, 0, "b.js", "{};\n"), + new SourceNode(1, 0, "b.js", "{};" + nl), new SourceNode(2, 0, "b.js", "Test"), new SourceNode(2, 0, "b.js", ".A", "A"), - new SourceNode(2, 20, "b.js", " = { value: 1234 };\n", "A"), - "}());\n", + new SourceNode(2, 20, "b.js", " = { value: ", "A"), + "1234", + new SourceNode(2, 40, "b.js", " };" + nl, "A"), + "}());" + nl, "/* Generated Source */" ]); input = input.toStringWithSourceMap({ file: 'foo.js' }); + assert.equal(input.code, [ + "(function() {", + " var Test = {};", + "Test.A = { value: 1234 };", + "}());", + "/* Generated Source */" + ].join(nl)) + var correctMap = new SourceMapGenerator({ file: 'foo.js' }); @@ -280,9 +400,8 @@ define(function (require, exports, module) { source: 'a.js', original: { line: 1, column: 0 } }); - correctMap.addMapping({ - generated: { line: 2, column: 0 } - }); + // Here is no need for a empty mapping, + // because mappings ends at eol correctMap.addMapping({ generated: { line: 2, column: 2 }, source: 'a.js', @@ -310,15 +429,143 @@ define(function (require, exports, module) { name: 'A', original: { line: 2, column: 20 } }); + // This empty mapping is required, + // because there is a hole in the middle of the line + correctMap.addMapping({ + generated: { line: 3, column: 18 } + }); correctMap.addMapping({ - generated: { line: 4, column: 0 } + generated: { line: 3, column: 22 }, + source: 'b.js', + name: 'A', + original: { line: 2, column: 40 } }); + // Here is no need for a empty mapping, + // because mappings ends at eol var inputMap = input.map.toJSON(); correctMap = correctMap.toJSON(); - util.assertEqualMaps(assert, correctMap, inputMap); + util.assertEqualMaps(assert, inputMap, correctMap); + }); + + exports['test .toStringWithSourceMap() multi-line SourceNodes'] = forEachNewline(function (assert, util, nl) { + var input = 
new SourceNode(null, null, null, [ + new SourceNode(1, 0, "a.js", "(function() {" + nl + "var nextLine = 1;" + nl + "anotherLine();" + nl), + new SourceNode(2, 2, "b.js", "Test.call(this, 123);" + nl), + new SourceNode(2, 2, "b.js", "this['stuff'] = 'v';" + nl), + new SourceNode(2, 2, "b.js", "anotherLine();" + nl), + "/*" + nl + "Generated" + nl + "Source" + nl + "*/" + nl, + new SourceNode(3, 4, "c.js", "anotherLine();" + nl), + "/*" + nl + "Generated" + nl + "Source" + nl + "*/" + ]); + input = input.toStringWithSourceMap({ + file: 'foo.js' + }); + + assert.equal(input.code, [ + "(function() {", + "var nextLine = 1;", + "anotherLine();", + "Test.call(this, 123);", + "this['stuff'] = 'v';", + "anotherLine();", + "/*", + "Generated", + "Source", + "*/", + "anotherLine();", + "/*", + "Generated", + "Source", + "*/" + ].join(nl)); + + var correctMap = new SourceMapGenerator({ + file: 'foo.js' + }); + correctMap.addMapping({ + generated: { line: 1, column: 0 }, + source: 'a.js', + original: { line: 1, column: 0 } + }); + correctMap.addMapping({ + generated: { line: 2, column: 0 }, + source: 'a.js', + original: { line: 1, column: 0 } + }); + correctMap.addMapping({ + generated: { line: 3, column: 0 }, + source: 'a.js', + original: { line: 1, column: 0 } + }); + correctMap.addMapping({ + generated: { line: 4, column: 0 }, + source: 'b.js', + original: { line: 2, column: 2 } + }); + correctMap.addMapping({ + generated: { line: 5, column: 0 }, + source: 'b.js', + original: { line: 2, column: 2 } + }); + correctMap.addMapping({ + generated: { line: 6, column: 0 }, + source: 'b.js', + original: { line: 2, column: 2 } + }); + correctMap.addMapping({ + generated: { line: 11, column: 0 }, + source: 'c.js', + original: { line: 3, column: 4 } + }); + + var inputMap = input.map.toJSON(); + correctMap = correctMap.toJSON(); + util.assertEqualMaps(assert, inputMap, correctMap); + }); + + exports['test .toStringWithSourceMap() with empty string'] = function (assert, util) { + var node = new SourceNode(1, 0, 'empty.js', ''); + var result = node.toStringWithSourceMap(); + assert.equal(result.code, ''); }; + exports['test .toStringWithSourceMap() with consecutive newlines'] = forEachNewline(function (assert, util, nl) { + var input = new SourceNode(null, null, null, [ + "/***/" + nl + nl, + new SourceNode(1, 0, "a.js", "'use strict';" + nl), + new SourceNode(2, 0, "a.js", "a();"), + ]); + input = input.toStringWithSourceMap({ + file: 'foo.js' + }); + + assert.equal(input.code, [ + "/***/", + "", + "'use strict';", + "a();", + ].join(nl)); + + var correctMap = new SourceMapGenerator({ + file: 'foo.js' + }); + correctMap.addMapping({ + generated: { line: 3, column: 0 }, + source: 'a.js', + original: { line: 1, column: 0 } + }); + correctMap.addMapping({ + generated: { line: 4, column: 0 }, + source: 'a.js', + original: { line: 2, column: 0 } + }); + + var inputMap = input.map.toJSON(); + correctMap = correctMap.toJSON(); + util.assertEqualMaps(assert, inputMap, correctMap); + }); + exports['test setSourceContent with toStringWithSourceMap'] = function (assert, util) { var aNode = new SourceNode(1, 1, 'a.js', 'a'); aNode.setSourceContent('a.js', 'someContent'); diff --git a/node_modules/source-map/test/source-map/test-util.js b/node_modules/source-map/test/source-map/test-util.js new file mode 100644 index 000000000..997d1a269 --- /dev/null +++ b/node_modules/source-map/test/source-map/test-util.js @@ -0,0 +1,216 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and 
contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ +if (typeof define !== 'function') { + var define = require('amdefine')(module, require); +} +define(function (require, exports, module) { + + var libUtil = require('../../lib/source-map/util'); + + exports['test urls'] = function (assert, util) { + var assertUrl = function (url) { + assert.equal(url, libUtil.urlGenerate(libUtil.urlParse(url))); + }; + assertUrl('http://'); + assertUrl('http://www.example.com'); + assertUrl('http://user:pass@www.example.com'); + assertUrl('http://www.example.com:80'); + assertUrl('http://www.example.com/'); + assertUrl('http://www.example.com/foo/bar'); + assertUrl('http://www.example.com/foo/bar/'); + assertUrl('http://user:pass@www.example.com:80/foo/bar/'); + + assertUrl('//'); + assertUrl('//www.example.com'); + assertUrl('file:///www.example.com'); + + assert.equal(libUtil.urlParse(''), null); + assert.equal(libUtil.urlParse('.'), null); + assert.equal(libUtil.urlParse('..'), null); + assert.equal(libUtil.urlParse('a'), null); + assert.equal(libUtil.urlParse('a/b'), null); + assert.equal(libUtil.urlParse('a//b'), null); + assert.equal(libUtil.urlParse('/a'), null); + assert.equal(libUtil.urlParse('data:foo,bar'), null); + }; + + exports['test normalize()'] = function (assert, util) { + assert.equal(libUtil.normalize('/..'), '/'); + assert.equal(libUtil.normalize('/../'), '/'); + assert.equal(libUtil.normalize('/../../../..'), '/'); + assert.equal(libUtil.normalize('/../../../../a/b/c'), '/a/b/c'); + assert.equal(libUtil.normalize('/a/b/c/../../../d/../../e'), '/e'); + + assert.equal(libUtil.normalize('..'), '..'); + assert.equal(libUtil.normalize('../'), '../'); + assert.equal(libUtil.normalize('../../a/'), '../../a/'); + assert.equal(libUtil.normalize('a/..'), '.'); + assert.equal(libUtil.normalize('a/../../..'), '../..'); + + assert.equal(libUtil.normalize('/.'), '/'); + assert.equal(libUtil.normalize('/./'), '/'); + assert.equal(libUtil.normalize('/./././.'), '/'); + assert.equal(libUtil.normalize('/././././a/b/c'), '/a/b/c'); + assert.equal(libUtil.normalize('/a/b/c/./././d/././e'), '/a/b/c/d/e'); + + assert.equal(libUtil.normalize(''), '.'); + assert.equal(libUtil.normalize('.'), '.'); + assert.equal(libUtil.normalize('./'), '.'); + assert.equal(libUtil.normalize('././a'), 'a'); + assert.equal(libUtil.normalize('a/./'), 'a/'); + assert.equal(libUtil.normalize('a/././.'), 'a'); + + assert.equal(libUtil.normalize('/a/b//c////d/////'), '/a/b/c/d/'); + assert.equal(libUtil.normalize('///a/b//c////d/////'), '///a/b/c/d/'); + assert.equal(libUtil.normalize('a/b//c////d'), 'a/b/c/d'); + + assert.equal(libUtil.normalize('.///.././../a/b//./..'), '../../a') + + assert.equal(libUtil.normalize('http://www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.normalize('http://www.example.com/'), 'http://www.example.com/'); + assert.equal(libUtil.normalize('http://www.example.com/./..//a/b/c/.././d//'), 'http://www.example.com/a/b/d/'); + }; + + exports['test join()'] = function (assert, util) { + assert.equal(libUtil.join('a', 'b'), 'a/b'); + assert.equal(libUtil.join('a/', 'b'), 'a/b'); + assert.equal(libUtil.join('a//', 'b'), 'a/b'); + assert.equal(libUtil.join('a', 'b/'), 'a/b/'); + assert.equal(libUtil.join('a', 'b//'), 'a/b/'); + assert.equal(libUtil.join('a/', '/b'), '/b'); + assert.equal(libUtil.join('a//', '//b'), '//b'); + + assert.equal(libUtil.join('a', '..'), '.'); + assert.equal(libUtil.join('a', '../b'), 'b'); + 
assert.equal(libUtil.join('a/b', '../c'), 'a/c'); + + assert.equal(libUtil.join('a', '.'), 'a'); + assert.equal(libUtil.join('a', './b'), 'a/b'); + assert.equal(libUtil.join('a/b', './c'), 'a/b/c'); + + assert.equal(libUtil.join('a', 'http://www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.join('a', 'data:foo,bar'), 'data:foo,bar'); + + + assert.equal(libUtil.join('', 'b'), 'b'); + assert.equal(libUtil.join('.', 'b'), 'b'); + assert.equal(libUtil.join('', 'b/'), 'b/'); + assert.equal(libUtil.join('.', 'b/'), 'b/'); + assert.equal(libUtil.join('', 'b//'), 'b/'); + assert.equal(libUtil.join('.', 'b//'), 'b/'); + + assert.equal(libUtil.join('', '..'), '..'); + assert.equal(libUtil.join('.', '..'), '..'); + assert.equal(libUtil.join('', '../b'), '../b'); + assert.equal(libUtil.join('.', '../b'), '../b'); + + assert.equal(libUtil.join('', '.'), '.'); + assert.equal(libUtil.join('.', '.'), '.'); + assert.equal(libUtil.join('', './b'), 'b'); + assert.equal(libUtil.join('.', './b'), 'b'); + + assert.equal(libUtil.join('', 'http://www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.join('.', 'http://www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.join('', 'data:foo,bar'), 'data:foo,bar'); + assert.equal(libUtil.join('.', 'data:foo,bar'), 'data:foo,bar'); + + + assert.equal(libUtil.join('..', 'b'), '../b'); + assert.equal(libUtil.join('..', 'b/'), '../b/'); + assert.equal(libUtil.join('..', 'b//'), '../b/'); + + assert.equal(libUtil.join('..', '..'), '../..'); + assert.equal(libUtil.join('..', '../b'), '../../b'); + + assert.equal(libUtil.join('..', '.'), '..'); + assert.equal(libUtil.join('..', './b'), '../b'); + + assert.equal(libUtil.join('..', 'http://www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.join('..', 'data:foo,bar'), 'data:foo,bar'); + + + assert.equal(libUtil.join('a', ''), 'a'); + assert.equal(libUtil.join('a', '.'), 'a'); + assert.equal(libUtil.join('a/', ''), 'a'); + assert.equal(libUtil.join('a/', '.'), 'a'); + assert.equal(libUtil.join('a//', ''), 'a'); + assert.equal(libUtil.join('a//', '.'), 'a'); + assert.equal(libUtil.join('/a', ''), '/a'); + assert.equal(libUtil.join('/a', '.'), '/a'); + assert.equal(libUtil.join('', ''), '.'); + assert.equal(libUtil.join('.', ''), '.'); + assert.equal(libUtil.join('.', ''), '.'); + assert.equal(libUtil.join('.', '.'), '.'); + assert.equal(libUtil.join('..', ''), '..'); + assert.equal(libUtil.join('..', '.'), '..'); + assert.equal(libUtil.join('http://foo.org/a', ''), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/a/', ''), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/a/', '.'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/a//', ''), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/a//', '.'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org', ''), 'http://foo.org/'); + assert.equal(libUtil.join('http://foo.org', '.'), 'http://foo.org/'); + assert.equal(libUtil.join('http://foo.org/', ''), 'http://foo.org/'); + assert.equal(libUtil.join('http://foo.org/', '.'), 'http://foo.org/'); + assert.equal(libUtil.join('http://foo.org//', ''), 'http://foo.org/'); + assert.equal(libUtil.join('http://foo.org//', '.'), 'http://foo.org/'); + assert.equal(libUtil.join('//www.example.com', ''), '//www.example.com/'); + assert.equal(libUtil.join('//www.example.com', '.'), '//www.example.com/'); + + + 
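    // Editorial sketch (illustration only, not part of the upstream test file):
    // the assertions in this test pin down join()'s precedence rules. A short
    // recap, with the expected values repeated verbatim from this same test:
    assert.equal(libUtil.join('a', 'http://www.example.com'), 'http://www.example.com'); // a full URL or data: URI replaces the base outright
    assert.equal(libUtil.join('http://foo.org/a/', '/b'), 'http://foo.org/b');           // an absolute path keeps the base's scheme/host but replaces its path
    assert.equal(libUtil.join('a/b', '../c'), 'a/c');                                    // otherwise relative segments resolve against the base path
    assert.equal(libUtil.join('..', './b'), '../b');                                     // '.' segments are dropped; a leading '..' on the base is preserved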
assert.equal(libUtil.join('http://foo.org/a', 'b'), 'http://foo.org/a/b'); + assert.equal(libUtil.join('http://foo.org/a/', 'b'), 'http://foo.org/a/b'); + assert.equal(libUtil.join('http://foo.org/a//', 'b'), 'http://foo.org/a/b'); + assert.equal(libUtil.join('http://foo.org/a', 'b/'), 'http://foo.org/a/b/'); + assert.equal(libUtil.join('http://foo.org/a', 'b//'), 'http://foo.org/a/b/'); + assert.equal(libUtil.join('http://foo.org/a/', '/b'), 'http://foo.org/b'); + assert.equal(libUtil.join('http://foo.org/a//', '//b'), 'http://b'); + + assert.equal(libUtil.join('http://foo.org/a', '..'), 'http://foo.org/'); + assert.equal(libUtil.join('http://foo.org/a', '../b'), 'http://foo.org/b'); + assert.equal(libUtil.join('http://foo.org/a/b', '../c'), 'http://foo.org/a/c'); + + assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/a', './b'), 'http://foo.org/a/b'); + assert.equal(libUtil.join('http://foo.org/a/b', './c'), 'http://foo.org/a/b/c'); + + assert.equal(libUtil.join('http://foo.org/a', 'http://www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.join('http://foo.org/a', 'data:foo,bar'), 'data:foo,bar'); + + + assert.equal(libUtil.join('http://foo.org', 'a'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/', 'a'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org//', 'a'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org', '/a'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org/', '/a'), 'http://foo.org/a'); + assert.equal(libUtil.join('http://foo.org//', '/a'), 'http://foo.org/a'); + + + assert.equal(libUtil.join('http://', 'www.example.com'), 'http://www.example.com'); + assert.equal(libUtil.join('file:///', 'www.example.com'), 'file:///www.example.com'); + assert.equal(libUtil.join('http://', 'ftp://example.com'), 'ftp://example.com'); + + assert.equal(libUtil.join('http://www.example.com', '//foo.org/bar'), 'http://foo.org/bar'); + assert.equal(libUtil.join('//www.example.com', '//foo.org/bar'), '//foo.org/bar'); + }; + + // TODO Issue #128: Define and test this function properly. 
+ exports['test relative()'] = function (assert, util) { + assert.equal(libUtil.relative('/the/root', '/the/root/one.js'), 'one.js'); + assert.equal(libUtil.relative('/the/root', '/the/rootone.js'), '/the/rootone.js'); + + assert.equal(libUtil.relative('', '/the/root/one.js'), '/the/root/one.js'); + assert.equal(libUtil.relative('.', '/the/root/one.js'), '/the/root/one.js'); + assert.equal(libUtil.relative('', 'the/root/one.js'), 'the/root/one.js'); + assert.equal(libUtil.relative('.', 'the/root/one.js'), 'the/root/one.js'); + + assert.equal(libUtil.relative('/', '/the/root/one.js'), 'the/root/one.js'); + assert.equal(libUtil.relative('/', 'the/root/one.js'), 'the/root/one.js'); + }; + +}); diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/util.js b/node_modules/source-map/test/source-map/util.js similarity index 86% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/util.js rename to node_modules/source-map/test/source-map/util.js index 288046bfa..56bbe2c31 100644 --- a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/node_modules/source-map/test/source-map/util.js +++ b/node_modules/source-map/test/source-map/util.js @@ -40,6 +40,21 @@ define(function (require, exports, module) { sourceRoot: '/the/root', mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' }; + exports.testMapNoSourceRoot = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['one.js', 'two.js'], + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' + }; + exports.testMapEmptySourceRoot = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['one.js', 'two.js'], + sourceRoot: '', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' + }; exports.testMapWithSourcesContent = { version: 3, file: 'min.js', @@ -56,6 +71,22 @@ define(function (require, exports, module) { sourceRoot: '/the/root', mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' }; + exports.testMapRelativeSources = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['./one.js', './two.js'], + sourcesContent: [ + ' ONE.foo = function (bar) {\n' + + ' return baz(bar);\n' + + ' };', + ' TWO.inc = function (n) {\n' + + ' return n + 1;\n' + + ' };' + ], + sourceRoot: '/the/root', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' + }; exports.emptyMap = { version: 3, file: 'min.js', diff --git a/node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/.npmignore b/node_modules/stream-counter/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/glob/node_modules/minimatch/node_modules/lru-cache/.npmignore rename to node_modules/stream-counter/.npmignore diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/README.md b/node_modules/stream-counter/README.md similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/README.md rename to node_modules/stream-counter/README.md diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/index.js b/node_modules/stream-counter/index.js similarity index 100% rename 
from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/index.js rename to node_modules/stream-counter/index.js diff --git a/node_modules/stream-counter/package.json b/node_modules/stream-counter/package.json new file mode 100644 index 000000000..d66af77a9 --- /dev/null +++ b/node_modules/stream-counter/package.json @@ -0,0 +1,53 @@ +{ + "_from": "stream-counter@~0.2.0", + "_id": "stream-counter@0.2.0", + "_inBundle": false, + "_integrity": "sha1-3tJmVWMZyLDiIoErnPOyb6fZR94=", + "_location": "/stream-counter", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "stream-counter@~0.2.0", + "name": "stream-counter", + "escapedName": "stream-counter", + "rawSpec": "~0.2.0", + "saveSpec": null, + "fetchSpec": "~0.2.0" + }, + "_requiredBy": [ + "/multiparty" + ], + "_resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", + "_shasum": "ded266556319c8b0e222812b9cf3b26fa7d947de", + "_spec": "stream-counter@~0.2.0", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/multiparty", + "author": { + "name": "Andrew Kelley", + "email": "superjoe30@gmail.com" + }, + "bugs": { + "url": "https://github.com/superjoe30/node-stream-counter/issues" + }, + "bundleDependencies": false, + "dependencies": { + "readable-stream": "~1.1.8" + }, + "deprecated": false, + "description": "keeps track of how many bytes have been written to a stream", + "engines": { + "node": ">=0.8.0" + }, + "homepage": "https://github.com/superjoe30/node-stream-counter#readme", + "license": "BSD", + "main": "index.js", + "name": "stream-counter", + "repository": { + "type": "git", + "url": "git://github.com/superjoe30/node-stream-counter.git" + }, + "scripts": { + "test": "node test/test.js" + }, + "version": "0.2.0" +} diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/test/test.js b/node_modules/stream-counter/test/test.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/test/test.js rename to node_modules/stream-counter/test/test.js diff --git a/node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/test/test.txt b/node_modules/stream-counter/test/test.txt similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/multiparty/node_modules/stream-counter/test/test.txt rename to node_modules/stream-counter/test/test.txt diff --git a/node_modules/string_decoder/.npmignore b/node_modules/string_decoder/.npmignore new file mode 100644 index 000000000..206320cc1 --- /dev/null +++ b/node_modules/string_decoder/.npmignore @@ -0,0 +1,2 @@ +build +test diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 000000000..6de584a48 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,20 @@ +Copyright Joyent, Inc. and other Node contributors. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 000000000..4d2aa0015 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,7 @@ +**string_decoder.js** (`require('string_decoder')`) from Node.js core + +Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. + +Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** + +The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/node_modules/string_decoder/index.js b/node_modules/string_decoder/index.js new file mode 100644 index 000000000..b00e54fb7 --- /dev/null +++ b/node_modules/string_decoder/index.js @@ -0,0 +1,221 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
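
// Editorial usage sketch (illustration only, not part of the upstream module):
// the StringDecoder defined below buffers the bytes of a partially received
// multi-byte character across write() calls, so callers never see a broken
// character. Assuming require('string_decoder') resolves to this file:
//
//   var StringDecoder = require('string_decoder').StringDecoder;
//   var decoder = new StringDecoder('utf8');
//   var euro = new Buffer([0xE2, 0x82, 0xAC]); // '€' is three bytes in UTF-8
//   decoder.write(euro.slice(0, 1));           // '' — incomplete, held in charBuffer
//   decoder.write(euro.slice(1));              // '€' — emitted once the remaining bytes arrive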
+ +var Buffer = require('buffer').Buffer; + +var isBufferEncoding = Buffer.isEncoding + || function(encoding) { + switch (encoding && encoding.toLowerCase()) { + case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; + default: return false; + } + } + + +function assertEncoding(encoding) { + if (encoding && !isBufferEncoding(encoding)) { + throw new Error('Unknown encoding: ' + encoding); + } +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. CESU-8 is handled as part of the UTF-8 encoding. +// +// @TODO Handling all encodings inside a single object makes it very difficult +// to reason about this code, so it should be split up in the future. +// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code +// points as used by CESU-8. +var StringDecoder = exports.StringDecoder = function(encoding) { + this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); + assertEncoding(encoding); + switch (this.encoding) { + case 'utf8': + // CESU-8 represents each of Surrogate Pair by 3-bytes + this.surrogateSize = 3; + break; + case 'ucs2': + case 'utf16le': + // UTF-16 represents each of Surrogate Pair by 2-bytes + this.surrogateSize = 2; + this.detectIncompleteChar = utf16DetectIncompleteChar; + break; + case 'base64': + // Base-64 stores 3 bytes in 4 chars, and pads the remainder. + this.surrogateSize = 3; + this.detectIncompleteChar = base64DetectIncompleteChar; + break; + default: + this.write = passThroughWrite; + return; + } + + // Enough space to store all bytes of a single character. UTF-8 needs 4 + // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). + this.charBuffer = new Buffer(6); + // Number of bytes received for the current incomplete multi-byte character. + this.charReceived = 0; + // Number of bytes expected for the current incomplete multi-byte character. + this.charLength = 0; +}; + + +// write decodes the given buffer and returns it as JS string that is +// guaranteed to not contain any partial multi-byte characters. Any partial +// character found at the end of the buffer is buffered up, and will be +// returned when calling write again with the remaining bytes. +// +// Note: Converting a Buffer containing an orphan surrogate to a String +// currently works, but converting a String to a Buffer (via `new Buffer`, or +// Buffer#write) will replace incomplete surrogates with the unicode +// replacement character. See https://codereview.chromium.org/121173009/ . +StringDecoder.prototype.write = function(buffer) { + var charStr = ''; + // if our last write ended with an incomplete multibyte character + while (this.charLength) { + // determine how many remaining bytes this buffer has to offer for this char + var available = (buffer.length >= this.charLength - this.charReceived) ? + this.charLength - this.charReceived : + buffer.length; + + // add the new bytes to the char buffer + buffer.copy(this.charBuffer, this.charReceived, 0, available); + this.charReceived += available; + + if (this.charReceived < this.charLength) { + // still not enough chars in this buffer? wait for more ... 
+ return ''; + } + + // remove bytes belonging to the current character from the buffer + buffer = buffer.slice(available, buffer.length); + + // get the character that was split + charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); + + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + var charCode = charStr.charCodeAt(charStr.length - 1); + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + this.charLength += this.surrogateSize; + charStr = ''; + continue; + } + this.charReceived = this.charLength = 0; + + // if there are no more bytes in this buffer, just emit our char + if (buffer.length === 0) { + return charStr; + } + break; + } + + // determine and set charLength / charReceived + this.detectIncompleteChar(buffer); + + var end = buffer.length; + if (this.charLength) { + // buffer the incomplete character bytes we got + buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); + end -= this.charReceived; + } + + charStr += buffer.toString(this.encoding, 0, end); + + var end = charStr.length - 1; + var charCode = charStr.charCodeAt(end); + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + var size = this.surrogateSize; + this.charLength += size; + this.charReceived += size; + this.charBuffer.copy(this.charBuffer, size, 0, size); + buffer.copy(this.charBuffer, 0, 0, size); + return charStr.substring(0, end); + } + + // or just emit the charStr + return charStr; +}; + +// detectIncompleteChar determines if there is an incomplete UTF-8 character at +// the end of the given buffer. If so, it sets this.charLength to the byte +// length that character, and sets this.charReceived to the number of bytes +// that are available for this character. +StringDecoder.prototype.detectIncompleteChar = function(buffer) { + // determine how many bytes we have to check at the end of this buffer + var i = (buffer.length >= 3) ? 3 : buffer.length; + + // Figure out if one of the last i bytes of our buffer announces an + // incomplete char. + for (; i > 0; i--) { + var c = buffer[buffer.length - i]; + + // See http://en.wikipedia.org/wiki/UTF-8#Description + + // 110XXXXX + if (i == 1 && c >> 5 == 0x06) { + this.charLength = 2; + break; + } + + // 1110XXXX + if (i <= 2 && c >> 4 == 0x0E) { + this.charLength = 3; + break; + } + + // 11110XXX + if (i <= 3 && c >> 3 == 0x1E) { + this.charLength = 4; + break; + } + } + this.charReceived = i; +}; + +StringDecoder.prototype.end = function(buffer) { + var res = ''; + if (buffer && buffer.length) + res = this.write(buffer); + + if (this.charReceived) { + var cr = this.charReceived; + var buf = this.charBuffer; + var enc = this.encoding; + res += buf.slice(0, cr).toString(enc); + } + + return res; +}; + +function passThroughWrite(buffer) { + return buffer.toString(this.encoding); +} + +function utf16DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 2; + this.charLength = this.charReceived ? 2 : 0; +} + +function base64DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 3; + this.charLength = this.charReceived ? 
3 : 0; +} diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 000000000..25973f2f0 --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,53 @@ +{ + "_from": "string_decoder@~0.10.x", + "_id": "string_decoder@0.10.31", + "_inBundle": false, + "_integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=", + "_location": "/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~0.10.x", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~0.10.x", + "saveSpec": null, + "fetchSpec": "~0.10.x" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", + "_spec": "string_decoder@~0.10.x", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/rvagg/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "tap": "~0.4.8" + }, + "homepage": "https://github.com/rvagg/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "index.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/rvagg/string_decoder.git" + }, + "scripts": { + "test": "tap test/simple/*.js" + }, + "version": "0.10.31" +} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/.npmignore b/node_modules/uglify-js/.npmignore similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/.npmignore rename to node_modules/uglify-js/.npmignore diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/.travis.yml b/node_modules/uglify-js/.travis.yml similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/.travis.yml rename to node_modules/uglify-js/.travis.yml diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/LICENSE b/node_modules/uglify-js/LICENSE similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/LICENSE rename to node_modules/uglify-js/LICENSE diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/README.md b/node_modules/uglify-js/README.md similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/README.md rename to node_modules/uglify-js/README.md diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/bin/uglifyjs b/node_modules/uglify-js/bin/uglifyjs old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/bin/uglifyjs rename to node_modules/uglify-js/bin/uglifyjs diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/ast.js b/node_modules/uglify-js/lib/ast.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/ast.js rename to node_modules/uglify-js/lib/ast.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/compress.js b/node_modules/uglify-js/lib/compress.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/compress.js rename to node_modules/uglify-js/lib/compress.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/mozilla-ast.js b/node_modules/uglify-js/lib/mozilla-ast.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/mozilla-ast.js rename to node_modules/uglify-js/lib/mozilla-ast.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/output.js b/node_modules/uglify-js/lib/output.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/output.js rename to node_modules/uglify-js/lib/output.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/parse.js b/node_modules/uglify-js/lib/parse.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/parse.js rename to node_modules/uglify-js/lib/parse.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/scope.js b/node_modules/uglify-js/lib/scope.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/scope.js rename to node_modules/uglify-js/lib/scope.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/sourcemap.js b/node_modules/uglify-js/lib/sourcemap.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/sourcemap.js rename to node_modules/uglify-js/lib/sourcemap.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/transform.js b/node_modules/uglify-js/lib/transform.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/transform.js rename to node_modules/uglify-js/lib/transform.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/utils.js b/node_modules/uglify-js/lib/utils.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/lib/utils.js rename to node_modules/uglify-js/lib/utils.js diff --git a/node_modules/uglify-js/package.json b/node_modules/uglify-js/package.json new file mode 100644 index 000000000..4b3d2cbc1 --- /dev/null +++ b/node_modules/uglify-js/package.json @@ -0,0 +1,60 @@ +{ + "_from": "uglify-js@~2.3", + "_id": "uglify-js@2.3.6", + "_inBundle": false, + "_integrity": "sha1-+gmEdwtCi3qbKoBY9GNV0U/vIRo=", + "_location": "/uglify-js", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "uglify-js@~2.3", + "name": "uglify-js", + "escapedName": "uglify-js", + "rawSpec": "~2.3", + "saveSpec": null, + "fetchSpec": "~2.3" + }, + "_requiredBy": [ + "/handlebars" + ], + "_resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.3.6.tgz", + "_shasum": "fa0984770b428b7a9b2a8058f46355d14fef211a", + "_spec": "uglify-js@~2.3", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/handlebars", + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "bugs": { + 
"url": "https://github.com/mishoo/UglifyJS2/issues" + }, + "bundleDependencies": false, + "dependencies": { + "async": "~0.2.6", + "optimist": "~0.3.5", + "source-map": "~0.1.7" + }, + "deprecated": false, + "description": "JavaScript parser, mangler/compressor and beautifier toolkit", + "engines": { + "node": ">=0.4.0" + }, + "homepage": "http://lisperator.net/uglifyjs", + "main": "tools/node.js", + "maintainers": [ + { + "name": "Mihai Bazon", + "email": "mihai.bazon@gmail.com", + "url": "http://lisperator.net/" + } + ], + "name": "uglify-js", + "repository": { + "type": "git", + "url": "git+https://github.com/mishoo/UglifyJS2.git" + }, + "scripts": { + "test": "node test/run-tests.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/arrays.js b/node_modules/uglify-js/test/compress/arrays.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/arrays.js rename to node_modules/uglify-js/test/compress/arrays.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/blocks.js b/node_modules/uglify-js/test/compress/blocks.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/blocks.js rename to node_modules/uglify-js/test/compress/blocks.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/conditionals.js b/node_modules/uglify-js/test/compress/conditionals.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/conditionals.js rename to node_modules/uglify-js/test/compress/conditionals.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/dead-code.js b/node_modules/uglify-js/test/compress/dead-code.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/dead-code.js rename to node_modules/uglify-js/test/compress/dead-code.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/debugger.js b/node_modules/uglify-js/test/compress/debugger.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/debugger.js rename to node_modules/uglify-js/test/compress/debugger.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/drop-unused.js b/node_modules/uglify-js/test/compress/drop-unused.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/drop-unused.js rename to node_modules/uglify-js/test/compress/drop-unused.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-105.js b/node_modules/uglify-js/test/compress/issue-105.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-105.js rename to node_modules/uglify-js/test/compress/issue-105.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-12.js b/node_modules/uglify-js/test/compress/issue-12.js similarity index 100% rename from 
node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-12.js rename to node_modules/uglify-js/test/compress/issue-12.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-143.js b/node_modules/uglify-js/test/compress/issue-143.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-143.js rename to node_modules/uglify-js/test/compress/issue-143.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-22.js b/node_modules/uglify-js/test/compress/issue-22.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-22.js rename to node_modules/uglify-js/test/compress/issue-22.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-44.js b/node_modules/uglify-js/test/compress/issue-44.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-44.js rename to node_modules/uglify-js/test/compress/issue-44.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-59.js b/node_modules/uglify-js/test/compress/issue-59.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/issue-59.js rename to node_modules/uglify-js/test/compress/issue-59.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/labels.js b/node_modules/uglify-js/test/compress/labels.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/labels.js rename to node_modules/uglify-js/test/compress/labels.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/loops.js b/node_modules/uglify-js/test/compress/loops.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/loops.js rename to node_modules/uglify-js/test/compress/loops.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/properties.js b/node_modules/uglify-js/test/compress/properties.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/properties.js rename to node_modules/uglify-js/test/compress/properties.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/sequences.js b/node_modules/uglify-js/test/compress/sequences.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/sequences.js rename to node_modules/uglify-js/test/compress/sequences.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/switch.js b/node_modules/uglify-js/test/compress/switch.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/switch.js rename to node_modules/uglify-js/test/compress/switch.js diff --git 
a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/typeof.js b/node_modules/uglify-js/test/compress/typeof.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/compress/typeof.js rename to node_modules/uglify-js/test/compress/typeof.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/run-tests.js b/node_modules/uglify-js/test/run-tests.js old mode 100644 new mode 100755 similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/test/run-tests.js rename to node_modules/uglify-js/test/run-tests.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/tools/node.js b/node_modules/uglify-js/tools/node.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/uglify-js/tools/node.js rename to node_modules/uglify-js/tools/node.js diff --git a/node_modules/express/node_modules/connect/node_modules/uid2/LICENSE b/node_modules/uid2/LICENSE similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/uid2/LICENSE rename to node_modules/uid2/LICENSE diff --git a/node_modules/express/node_modules/connect/node_modules/uid2/index.js b/node_modules/uid2/index.js similarity index 100% rename from node_modules/express/node_modules/connect/node_modules/uid2/index.js rename to node_modules/uid2/index.js diff --git a/node_modules/uid2/package.json b/node_modules/uid2/package.json new file mode 100644 index 000000000..b10eb1bfd --- /dev/null +++ b/node_modules/uid2/package.json @@ -0,0 +1,34 @@ +{ + "_from": "uid2@0.0.3", + "_id": "uid2@0.0.3", + "_inBundle": false, + "_integrity": "sha1-SDEm4Rd03y9xuLY53NeZw3YWK4I=", + "_location": "/uid2", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "uid2@0.0.3", + "name": "uid2", + "escapedName": "uid2", + "rawSpec": "0.0.3", + "saveSpec": null, + "fetchSpec": "0.0.3" + }, + "_requiredBy": [ + "/connect" + ], + "_resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz", + "_shasum": "483126e11774df2f71b8b639dcd799c376162b82", + "_spec": "uid2@0.0.3", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/connect", + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "strong uid", + "name": "uid2", + "tags": [ + "uid" + ], + "version": "0.0.3" +} diff --git a/node_modules/wordwrap/LICENSE b/node_modules/wordwrap/LICENSE new file mode 100644 index 000000000..ee27ba4b4 --- /dev/null +++ b/node_modules/wordwrap/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/README.markdown b/node_modules/wordwrap/README.markdown similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/README.markdown rename to node_modules/wordwrap/README.markdown diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/example/center.js b/node_modules/wordwrap/example/center.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/example/center.js rename to node_modules/wordwrap/example/center.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/example/meat.js b/node_modules/wordwrap/example/meat.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/example/meat.js rename to node_modules/wordwrap/example/meat.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/index.js b/node_modules/wordwrap/index.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/index.js rename to node_modules/wordwrap/index.js diff --git a/node_modules/wordwrap/package.json b/node_modules/wordwrap/package.json new file mode 100644 index 000000000..03c038a11 --- /dev/null +++ b/node_modules/wordwrap/package.json @@ -0,0 +1,66 @@ +{ + "_from": "wordwrap@~0.0.2", + "_id": "wordwrap@0.0.3", + "_inBundle": false, + "_integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "_location": "/wordwrap", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "wordwrap@~0.0.2", + "name": "wordwrap", + "escapedName": "wordwrap", + "rawSpec": "~0.0.2", + "saveSpec": null, + "fetchSpec": "~0.0.2" + }, + "_requiredBy": [ + "/optimist" + ], + "_resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "_shasum": "a3d5da6cd5c0bc0008d37234bbaf1bed63059107", + "_spec": "wordwrap@~0.0.2", + "_where": "/Users/tamtranht02/Documents/GitHub/ixd-skeleton/node_modules/optimist", + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "bugs": { + "url": "https://github.com/substack/node-wordwrap/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Wrap those words. 
Show them at what columns to start and stop.", + "devDependencies": { + "expresso": "=0.7.x" + }, + "directories": { + "lib": ".", + "example": "example", + "test": "test" + }, + "engines": { + "node": ">=0.4.0" + }, + "homepage": "https://github.com/substack/node-wordwrap#readme", + "keywords": [ + "word", + "wrap", + "rule", + "format", + "column" + ], + "license": "MIT", + "main": "./index.js", + "name": "wordwrap", + "repository": { + "type": "git", + "url": "git://github.com/substack/node-wordwrap.git" + }, + "scripts": { + "test": "expresso" + }, + "version": "0.0.3" +} diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/test/break.js b/node_modules/wordwrap/test/break.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/test/break.js rename to node_modules/wordwrap/test/break.js diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/test/idleness.txt b/node_modules/wordwrap/test/idleness.txt similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/test/idleness.txt rename to node_modules/wordwrap/test/idleness.txt diff --git a/node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/test/wrap.js b/node_modules/wordwrap/test/wrap.js similarity index 100% rename from node_modules/express3-handlebars/node_modules/handlebars/node_modules/optimist/node_modules/wordwrap/test/wrap.js rename to node_modules/wordwrap/test/wrap.js diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..fd5cfbf44 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,409 @@ +{ + "name": "IntroHCI", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "amdefine": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", + "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", + "optional": true + }, + "async": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=" + }, + "batch": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.5.0.tgz", + "integrity": "sha1-/S4Fp6XWlrTbkxQBPihdj/NVfsM=" + }, + "bson": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.2.5.tgz", + "integrity": "sha1-UA0m2IPdyOAvLIgBFidjYRHBBcU=" + }, + "buffer-crc32": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.1.tgz", + "integrity": "sha1-vj5TgvwCttYySVasGvmKqYsIU0w=" + }, + "bytes": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-0.2.1.tgz", + "integrity": "sha1-VVsIq8sGP4l1kFMCUj5M1P/f3zE=" + }, + "commander": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-1.3.2.tgz", + "integrity": "sha1-io8w7GcKb91kr1LxkUuQfXnq1bU=", + "requires": { + "keypress": "0.1.x" + } + }, + "connect": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/connect/-/connect-2.12.0.tgz", + "integrity": "sha1-Mdj6DcrN8ZCNgivSkjvootKn7Zo=", + "requires": { + "batch": "0.5.0", + "buffer-crc32": "0.2.1", + "bytes": "0.2.1", + "cookie": "0.1.0", + "cookie-signature": "1.0.1", + "debug": ">= 0.7.3 < 1", + "fresh": "0.2.0", + "methods": "0.1.0", + "multiparty": "2.2.0", + 
"negotiator": "0.3.0", + "pause": "0.0.1", + "qs": "0.6.6", + "raw-body": "1.1.2", + "send": "0.1.4", + "uid2": "0.0.3" + } + }, + "cookie": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.1.0.tgz", + "integrity": "sha1-kOtGndzpBchm3mh+/EMTHYgB+dA=" + }, + "cookie-signature": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.1.tgz", + "integrity": "sha1-ROByFIrwHm6OJK+/EmkNaK5pjss=" + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "debug": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.8.1.tgz", + "integrity": "sha1-IP9NJvXkIstoobrLu2EDmtjBwTA=" + }, + "express": { + "version": "3.4.8", + "resolved": "https://registry.npmjs.org/express/-/express-3.4.8.tgz", + "integrity": "sha1-qnqJht4HBTM39Lxe2aZFPZzI4uE=", + "requires": { + "buffer-crc32": "0.2.1", + "commander": "1.3.2", + "connect": "2.12.0", + "cookie": "0.1.0", + "cookie-signature": "1.0.1", + "debug": ">= 0.7.3 < 1", + "fresh": "0.2.0", + "merge-descriptors": "0.0.1", + "methods": "0.1.0", + "mkdirp": "0.3.5", + "range-parser": "0.0.4", + "send": "0.1.4" + } + }, + "express3-handlebars": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/express3-handlebars/-/express3-handlebars-0.5.0.tgz", + "integrity": "sha1-f3f++YOM762WfOGOdf0JL7TDE8I=", + "requires": { + "async": "~0.2", + "glob": "3.x", + "handlebars": "1.x", + "semver": "2.x" + } + }, + "fresh": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.2.0.tgz", + "integrity": "sha1-v9lALPPfEsSkwxDHn5mj3eE9NKc=" + }, + "glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", + "integrity": "sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=", + "requires": { + "inherits": "2", + "minimatch": "0.3" + } + }, + "handlebars": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-1.3.0.tgz", + "integrity": "sha1-npsTCpPjiUkTItl1zz7BgYw3zjQ=", + "requires": { + "optimist": "~0.3", + "uglify-js": "~2.3" + } + }, + "hooks": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz", + "integrity": "sha1-D1kbGzRL3LPfWXc/Yvu6+Fv0Aos=" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "kerberos": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz", + "integrity": "sha1-QoXZKgdI2yeEBi9a3OyfWVbLgYo=", + "optional": true + }, + "keypress": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/keypress/-/keypress-0.1.0.tgz", + "integrity": "sha1-SjGI1CkbZrT2XtuZ+AaqmuKTWSo=" + }, + "lru-cache": { + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", + "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=" + }, + "merge-descriptors": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-0.0.1.tgz", + "integrity": "sha1-L/CYDJJM+B0LXR+2ARd8uLtWwNA=" + }, + "methods": { + "version": "0.1.0", + "resolved": 
"https://registry.npmjs.org/methods/-/methods-0.1.0.tgz", + "integrity": "sha1-M11Cnu/SG3us8unJIqjSvRSjDk8=" + }, + "mime": { + "version": "1.2.11", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "integrity": "sha1-WCA+7Ybjpe8XrtK32evUfwpg3RA=" + }, + "minimatch": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", + "integrity": "sha1-J12O2qxPG7MyZHIInnlJyDlGmd0=", + "requires": { + "lru-cache": "2", + "sigmund": "~1.0.0" + } + }, + "mkdirp": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "integrity": "sha1-3j5fiWHIjHh+4TaN+EmsRBPsqNc=" + }, + "mongodb": { + "version": "1.3.23", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.23.tgz", + "integrity": "sha1-h0pSEhYrFhiK7q7l4GBndmyOnoY=", + "requires": { + "bson": "0.2.5", + "kerberos": "0.0.3" + } + }, + "mongoose": { + "version": "3.8.4", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-3.8.4.tgz", + "integrity": "sha1-etSJRNFiE+sWwU7F+9Jc+Q2MWGw=", + "requires": { + "hooks": "0.2.1", + "mongodb": "1.3.23", + "mpath": "0.1.1", + "mpromise": "0.4.3", + "mquery": "0.4.1", + "ms": "0.1.0", + "muri": "0.3.1", + "regexp-clone": "0.0.1", + "sliced": "0.0.5" + } + }, + "mpath": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz", + "integrity": "sha1-I9qFK3wjLuCX9HWdKcDunNItXkY=" + }, + "mpromise": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.4.3.tgz", + "integrity": "sha1-7cR6daKhd7DpOCc121Lb7DgIzDM=" + }, + "mquery": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-0.4.1.tgz", + "integrity": "sha1-xGJt0pID85kavp3bqIeBJWrUL98=", + "requires": { + "debug": "0.7.0", + "mongodb": "1.3.19", + "regexp-clone": "0.0.1", + "sliced": "0.0.5" + }, + "dependencies": { + "bson": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz", + "integrity": "sha1-Pb+YSsudM6aHi0bm+3r71hGFamA=" + }, + "debug": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.0.tgz", + "integrity": "sha1-9b4F7AQ0yZLXmUDlCyaVz7LgGwg=" + }, + "mongodb": { + "version": "1.3.19", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz", + "integrity": "sha1-8inbJAmPAZ2G0TWq+KGrXyZYsdQ=", + "requires": { + "bson": "0.2.2", + "kerberos": "0.0.3" + } + } + } + }, + "ms": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz", + "integrity": "sha1-8h+sSQ2vHXZn/RgP6QdzicyUQrI=" + }, + "multiparty": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/multiparty/-/multiparty-2.2.0.tgz", + "integrity": "sha1-pWfCrwAK0i3I8qZT2Rl4rh9TFvQ=", + "requires": { + "readable-stream": "~1.1.9", + "stream-counter": "~0.2.0" + } + }, + "muri": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz", + "integrity": "sha1-hhiJxchX8aQ3AL7oXVBzH2FyfJo=" + }, + "negotiator": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.3.0.tgz", + "integrity": "sha1-cG1pLv7d9XTVfqn7GriaT6fuj2A=" + }, + "optimist": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "integrity": "sha1-yQlBrVnkJzMokjB00s8ufLxuwNk=", + "requires": { + "wordwrap": "~0.0.2" + } + }, + "pause": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" + }, + 
"qs": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz", + "integrity": "sha1-bgFQmP9RlouKPIGQAdXyyJvEsQc=" + }, + "range-parser": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-0.0.4.tgz", + "integrity": "sha1-wEJ//vUcEKy6B4KkbJYC50T/Ygs=" + }, + "raw-body": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.2.tgz", + "integrity": "sha1-x0swBN6l3v0WlhcRBqx0DsMdYr4=", + "requires": { + "bytes": "~0.2.1" + } + }, + "readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "regexp-clone": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "integrity": "sha1-p8LgmJH9vzj7sQ03b7cwA+aKxYk=" + }, + "semver": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", + "integrity": "sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI=" + }, + "send": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/send/-/send-0.1.4.tgz", + "integrity": "sha1-vnDY0b4B3mGCGvE3gLUDRaT3Gr0=", + "requires": { + "debug": "*", + "fresh": "0.2.0", + "mime": "~1.2.9", + "range-parser": "0.0.4" + } + }, + "sigmund": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=" + }, + "sliced": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "integrity": "sha1-XtwETKTrb3gW1Qui/GPiXY/kcH8=" + }, + "source-map": { + "version": "0.1.43", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", + "integrity": "sha1-wkvBRspRfBRx9drL4lcbK3+eM0Y=", + "optional": true, + "requires": { + "amdefine": ">=0.0.4" + } + }, + "stream-counter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/stream-counter/-/stream-counter-0.2.0.tgz", + "integrity": "sha1-3tJmVWMZyLDiIoErnPOyb6fZR94=", + "requires": { + "readable-stream": "~1.1.8" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "uglify-js": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.3.6.tgz", + "integrity": "sha1-+gmEdwtCi3qbKoBY9GNV0U/vIRo=", + "optional": true, + "requires": { + "async": "~0.2.6", + "optimist": "~0.3.5", + "source-map": "~0.1.7" + } + }, + "uid2": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.3.tgz", + "integrity": "sha1-SDEm4Rd03y9xuLY53NeZw3YWK4I=" + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=" + } + } +} diff --git a/package.json b/package.json index fda0e758f..50ab72674 100644 --- a/package.json +++ b/package.json @@ -2,13 +2,11 @@ "name": "IntroHCI", "version": "0.0.1", "author": "Intro HCI Staff", - "repository": { "type": "git", "url": "https://github.com/IntroHCI/lab4" }, "license": "MIT", - "private": true, "scripts": { "start": "node app.js" @@ -21,5 +19,5 @@ }, "engines": { "node": "8.9.x" - } + } } diff --git a/public/css/add-activity.css b/public/css/add-activity.css index 
1191ded86..b9f6f5126 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -1,3 +1,7 @@ +body { + font-family: 'Roboto', sans-serif; +} + * { padding: 0; margin: 0; @@ -6,11 +10,12 @@ display: block; text-align: left; font-size: 2em; - margin-left: 10%; + margin-left: 30%; } #back-arrow:hover { cursor: pointer; } + #root-div { margin: 0 auto; width: 50%; @@ -39,4 +44,10 @@ input, select { } input { padding: 2px 5px; +} + +@media only screen and (max-width: 1000px) { + #root-div { + width: 100%; + } } \ No newline at end of file diff --git a/views/add.handlebars b/views/add.handlebars index 275039f4b..9f9cfdce7 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -2,6 +2,8 @@ + +
    From 59e5587639f521e2e20983bc059503d0eec73761 Mon Sep 17 00:00:00 2001 From: Hackerry Date: Mon, 15 Feb 2021 18:43:12 -0800 Subject: [PATCH 03/70] Add configuration files to specify line endings --- .gitattributes | 1 + .gitignore | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..85564bd9f --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +eol=lf \ No newline at end of file diff --git a/.gitignore b/.gitignore index 5f8086b26..a72b52ebe 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,4 @@ logs results npm-debug.log -#node_modules +node_modules From a5ee8a95bdfcade8563dff055e349f5ea8e065ab Mon Sep 17 00:00:00 2001 From: Hackerry Date: Mon, 15 Feb 2021 20:57:58 -0800 Subject: [PATCH 04/70] Finish wireframe 2 --- app.js | 2 + public/css/add-activity.css | 31 +++--- public/css/weekly-report.css | 78 ++++++++++++++ report.html | 193 +++++++++++++++++++++++++++++++++++ routes/add.js | 4 +- routes/report.js | 16 +++ views/add.handlebars | 60 ++++++----- views/report.handlebars | 145 ++++++++++++++++++++++++++ 8 files changed, 485 insertions(+), 44 deletions(-) create mode 100644 public/css/weekly-report.css create mode 100644 report.html create mode 100644 routes/report.js create mode 100644 views/report.handlebars diff --git a/app.js b/app.js index f93cdb1a7..32a7b4b35 100644 --- a/app.js +++ b/app.js @@ -11,6 +11,7 @@ var handlebars = require('express3-handlebars') var index = require('./routes/index'); // Example route var add = require('./routes/add'); +var report = require('./routes/report'); var app = express(); @@ -37,6 +38,7 @@ if ('development' == app.get('env')) { app.get('/', index.view); // Example route app.get('/add', add.view); +app.get('/report', report.view); http.createServer(app).listen(app.get('port'), function(){ console.log('Express server listening on port ' + app.get('port')); diff --git a/public/css/add-activity.css b/public/css/add-activity.css index b9f6f5126..35ea27a5e 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -6,34 +6,37 @@ body { padding: 0; margin: 0; } +#root-div { + margin: 0 auto; + width: 100%; + text-align: center; +} +#content-div { + width: 100%; +} +#title-bar { + position: relative; +} #back-arrow { - display: block; - text-align: left; + position: absolute; + top: 2px; + left: 30px; font-size: 2em; - margin-left: 30%; } #back-arrow:hover { cursor: pointer; } - -#root-div { - margin: 0 auto; - width: 50%; - text-align: center; -} - #activity-form { - width: 50%; + width: 80%; margin: 0 auto; } .form-entry { - width: 60%; + width: 80%; margin: 20px auto; } #form-submit { - width: 60%; + width: 80%; } - label { display: block; text-align: left; diff --git a/public/css/weekly-report.css b/public/css/weekly-report.css new file mode 100644 index 000000000..5f8f0b0f9 --- /dev/null +++ b/public/css/weekly-report.css @@ -0,0 +1,78 @@ +* { + padding: 0; + margin: 0; +} +#root-div { + width: 100%; + margin: 0 auto; +} +#content-div { + width: 100%; + text-align: center; + margin: 0 auto; +} +#title-bar { + position: relative; +} +#back-arrow { + position: absolute; + top: 2px; + left: 30px; + font-size: 2em; +} +#back-arrow:hover { + cursor: pointer; +} +#options { + width: 100%; + margin: 10px 0; +} +#week, #scale { + background-color: lightgray; + box-sizing: content-box; + border: none; + padding: 5px; + text-align: center; +} +#chart { + margin: 20px 0; +} +#list { + list-style: none; + width: 80%; 
+ margin: 20px auto; +} +.list-entry { + text-align: left; + width: 300px; + margin: 0 auto; + padding-left: 100px; + font-size: 1.2em; +} +.list-entry span { + display: inline-block; + width: 10px; + height: 10px; + border: 1px solid black; + box-sizing: border-box; + margin-right: 10px; +} +#buttons { + width: 100%; +} +#chart-type { + width: 60%; + padding: 5px; + box-sizing: content-box; + background-color: lightgray; + border: none; + text-align: center; + margin: 10px 0; +} +#detail-button, #suggestion-button { + background-color: lightgray; + padding: 5px; + width: 60%; + display: inline-block; + margin: 10px 0; +} \ No newline at end of file diff --git a/report.html b/report.html new file mode 100644 index 000000000..4d6f41c7e --- /dev/null +++ b/report.html @@ -0,0 +1,193 @@ + + + + + + +
[The body of the new report.html (193 lines) was garbled in extraction and survives only as bare diff markers. Recoverable structure, matching the selectors in public/css/weekly-report.css above: a title bar with a back arrow, a "Weekly Report" heading, week and scale selectors, a chart area, an activity legend list, "Details" and "Suggestions" buttons, and an inline script whose content is lost.]
      + + + \ No newline at end of file diff --git a/routes/add.js b/routes/add.js index 36401705a..f9d37f6d4 100644 --- a/routes/add.js +++ b/routes/add.js @@ -1,8 +1,8 @@ /* - * GET home page. + * GET add page. */ exports.view = function(req, res){ res.render('add'); - }; \ No newline at end of file +}; \ No newline at end of file diff --git a/routes/report.js b/routes/report.js new file mode 100644 index 000000000..54e3d4c18 --- /dev/null +++ b/routes/report.js @@ -0,0 +1,16 @@ + +/* + * GET report page. + */ + +exports.view = function(req, res){ + res.render('report', { + "activities": [ + {"name": "work", "hours": 9}, + {"name": "school", "hours": 7}, + {"name": "family", "hours": 4}, + {"name": "sleep", "hours": 4}, + {"name": "fun", "hours": 0}, + ], + }); +}; \ No newline at end of file diff --git a/views/add.handlebars b/views/add.handlebars index 9f9cfdce7..11a9d7e0b 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -7,35 +7,39 @@
[This hunk of views/add.handlebars was garbled in extraction; only diff markers and the "Add Activity" heading survive. It apparently rewraps the title bar and the activity form entries in new container markup to match the #root-div, #content-div and #title-bar rules added to add-activity.css above.]
      \ No newline at end of file diff --git a/views/report.handlebars b/views/report.handlebars new file mode 100644 index 000000000..3f86290f2 --- /dev/null +++ b/views/report.handlebars @@ -0,0 +1,145 @@ + + + + + + + + + +
[The body of the new views/report.handlebars (145 lines) was garbled in extraction; only diff markers and the texts "Weekly Report", "Details" and "Suggestions" survive. It apparently mirrors report.html above as a Handlebars template, including an inline script whose content is lost.]
        + + + + + \ No newline at end of file From 27445bfac69e9bb587fbf8712e325c3ab632d987 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Mon, 15 Feb 2021 22:15:10 -0800 Subject: [PATCH 05/70] update A5 --- public/css/add-activity.css | 11 +++++++++++ public/css/weekly-report.css | 18 ++++++++++++++++++ views/report.handlebars | 2 ++ 3 files changed, 31 insertions(+) diff --git a/public/css/add-activity.css b/public/css/add-activity.css index 35ea27a5e..af63fa56f 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -6,45 +6,56 @@ body { padding: 0; margin: 0; } + #root-div { margin: 0 auto; width: 100%; text-align: center; } + #content-div { width: 100%; } + #title-bar { position: relative; } + #back-arrow { position: absolute; top: 2px; left: 30px; font-size: 2em; } + #back-arrow:hover { cursor: pointer; } + #activity-form { width: 80%; margin: 0 auto; } + .form-entry { width: 80%; margin: 20px auto; } + #form-submit { width: 80%; } + label { display: block; text-align: left; margin-bottom: 5px; } + input, select { width: 100%; } + input { padding: 2px 5px; } diff --git a/public/css/weekly-report.css b/public/css/weekly-report.css index 5f8f0b0f9..5fb07d977 100644 --- a/public/css/weekly-report.css +++ b/public/css/weekly-report.css @@ -1,32 +1,43 @@ +body { + font-family: 'Roboto', sans-serif; +} + * { padding: 0; margin: 0; } + #root-div { width: 100%; margin: 0 auto; } + #content-div { width: 100%; text-align: center; margin: 0 auto; } + #title-bar { position: relative; } + #back-arrow { position: absolute; top: 2px; left: 30px; font-size: 2em; } + #back-arrow:hover { cursor: pointer; } + #options { width: 100%; margin: 10px 0; } + #week, #scale { background-color: lightgray; box-sizing: content-box; @@ -34,14 +45,17 @@ padding: 5px; text-align: center; } + #chart { margin: 20px 0; } + #list { list-style: none; width: 80%; margin: 20px auto; } + .list-entry { text-align: left; width: 300px; @@ -49,6 +63,7 @@ padding-left: 100px; font-size: 1.2em; } + .list-entry span { display: inline-block; width: 10px; @@ -57,9 +72,11 @@ box-sizing: border-box; margin-right: 10px; } + #buttons { width: 100%; } + #chart-type { width: 60%; padding: 5px; @@ -69,6 +86,7 @@ text-align: center; margin: 10px 0; } + #detail-button, #suggestion-button { background-color: lightgray; padding: 5px; diff --git a/views/report.handlebars b/views/report.handlebars index 3f86290f2..a641abb62 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -5,6 +5,8 @@ + +
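Each page in the commits above is added the same way: a one-function route module (routes/add.js, routes/report.js) plus a matching app.get() line in app.js. A minimal sketch of that pattern follows; the /history route and file names are illustrative only, not part of the project.

// routes/history.js (hypothetical): export a single view handler,
// exactly like routes/add.js and routes/report.js in the diffs above.
exports.view = function (req, res) {
  // Rendered by the express3-handlebars engine as views/history.handlebars.
  res.render('history');
};

// Wiring in app.js, Express 3 style as in the patches above:
//   var history = require('./routes/history');
//   app.get('/history', history.view);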
        From 2e8e3a557cbef6f7915148f65c1bab5b386d176f Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Tue, 16 Feb 2021 21:22:26 -0800 Subject: [PATCH 06/70] update viewport --- public/css/styles.css | 95 +++++++++++++++++++++++++++++++++++++++++ views/add.handlebars | 1 + views/index.handlebars | 29 ++++++++++++- views/report.handlebars | 1 + 4 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 public/css/styles.css diff --git a/public/css/styles.css b/public/css/styles.css new file mode 100644 index 000000000..6c2061292 --- /dev/null +++ b/public/css/styles.css @@ -0,0 +1,95 @@ +body { + font-family: 'Roboto', sans-serif; +} + +.home-content { + line-height: 25px; +} + +#title-bar { + margin-bottom: 8px; +} + +#mob-title { + margin-bottom: 10px; +} + +* { + padding: 0; + margin: 0; +} + +#root-div { + width: 100%; + margin: 0 auto; +} + +#content-div { + width: 100%; + text-align: center; + margin: 0 auto; +} + +#title-bar { + position: relative; +} + +#back-arrow { + position: absolute; + top: 2px; + left: 30px; + font-size: 2em; +} + +#back-arrow:hover { + cursor: pointer; +} + +#options { + width: 100%; + margin: 10px 0; +} + +#week, #scale { + background-color: lightgray; + box-sizing: content-box; + border: none; + padding: 5px; + text-align: center; +} + +#list { + list-style: none; + width: 80%; + margin: 20px auto; +} + +.list-entry { + text-align: left; + width: 300px; + margin: 0 auto; + padding-left: 100px; + font-size: 1.2em; +} + +.list-entry span { + display: inline-block; + width: 10px; + height: 10px; + border: 1px solid black; + box-sizing: border-box; + margin-right: 10px; +} + +#buttons { + width: 100%; +} + +#add-button, #report-button { + background-color: lightgray; + padding: 5px; + width: 60%; + display: inline-block; + margin: 10px 0; +} + diff --git a/views/add.handlebars b/views/add.handlebars index 11a9d7e0b..5a72c8b6c 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -4,6 +4,7 @@ +
        diff --git a/views/index.handlebars b/views/index.handlebars index 7fef0f9e9..680e84b44 100644 --- a/views/index.handlebars +++ b/views/index.handlebars @@ -1,8 +1,35 @@ Test Page + + + + + -Begin here (or replace this with any static code already written). +
[The rewritten views/index.handlebars body was garbled in extraction and only its text survives: a "MobTrack" title, the status lines "Today is Thursday", "02/18/21" and "There are 3 activities added so far.", and "Add Activity" / "Report History" buttons. The markup itself is lost.]
        diff --git a/views/report.handlebars b/views/report.handlebars index a641abb62..9a226928e 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -7,6 +7,7 @@ +
        From 27be833dbbc6d47e62e791386d61d061a73a9911 Mon Sep 17 00:00:00 2001 From: Hackerry Date: Tue, 16 Feb 2021 21:45:28 -0800 Subject: [PATCH 07/70] Test nav bar --- activity.json | 9 ++ app.js | 2 + public/css/add-activity.css | 28 +++-- public/css/weekly-report.css | 41 ++++++-- report.html | 193 ----------------------------------- routes/report.js | 11 +- views/add.handlebars | 17 +-- views/report.handlebars | 36 ++++--- 8 files changed, 103 insertions(+), 234 deletions(-) create mode 100644 activity.json delete mode 100644 report.html diff --git a/activity.json b/activity.json new file mode 100644 index 000000000..9e1fb2016 --- /dev/null +++ b/activity.json @@ -0,0 +1,9 @@ +{ + "activities": [ + {"name": "work", "hours": 9}, + {"name": "school", "hours": 7}, + {"name": "family", "hours": 4}, + {"name": "sleep", "hours": 4}, + {"name": "fun", "hours": 0} + ] +} \ No newline at end of file diff --git a/app.js b/app.js index 32a7b4b35..64dd60a52 100644 --- a/app.js +++ b/app.js @@ -11,6 +11,7 @@ var handlebars = require('express3-handlebars') var index = require('./routes/index'); // Example route var add = require('./routes/add'); +var addConfirmation = require("./routes/addConfirmation"); var report = require('./routes/report'); var app = express(); @@ -38,6 +39,7 @@ if ('development' == app.get('env')) { app.get('/', index.view); // Example route app.get('/add', add.view); +app.get('/addConfirmation', addConfirmation.view); app.get('/report', report.view); http.createServer(app).listen(app.get('port'), function(){ diff --git a/public/css/add-activity.css b/public/css/add-activity.css index af63fa56f..f6996f30a 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -7,6 +7,11 @@ body { margin: 0; } +a { + text-decoration: none; + color: black; +} + #root-div { margin: 0 auto; width: 100%; @@ -18,20 +23,31 @@ body { } #title-bar { - position: relative; + font-size: 2em; } -#back-arrow { - position: absolute; - top: 2px; - left: 30px; - font-size: 2em; +#back-bar, #hamburger-bar { + width: 40%; + display: inline-block; + box-sizing: content-box; +} + +#back-bar { + text-align: left; +} + +#hamburger-bar { + text-align: right; } #back-arrow:hover { cursor: pointer; } +#hamburger-button:hover { + cursor: pointer; +} + #activity-form { width: 80%; margin: 0 auto; diff --git a/public/css/weekly-report.css b/public/css/weekly-report.css index 5fb07d977..bbd42665f 100644 --- a/public/css/weekly-report.css +++ b/public/css/weekly-report.css @@ -7,6 +7,11 @@ body { margin: 0; } +a { + text-decoration: none; + color: black; +} + #root-div { width: 100%; margin: 0 auto; @@ -19,20 +24,44 @@ body { } #title-bar { - position: relative; + font-size: 2em; } -#back-arrow { - position: absolute; - top: 2px; - left: 30px; - font-size: 2em; +#back-bar, #hamburger-bar { + width: 40%; + display: inline-block; + box-sizing: content-box; +} + +#back-bar { + text-align: left; +} + +#hamburger-bar { + text-align: right; } #back-arrow:hover { cursor: pointer; } +#hamburger-button:hover { + cursor: pointer; +} + +#nav-bar { + position: absolute; + right: 0; + padding-right: 15%; + background-color: black; + color: white; + height: 100%; + top: 0; + z-index: -1; + text-align: left; + padding-top: 100px; +} + #options { width: 100%; margin: 10px 0; diff --git a/report.html b/report.html deleted file mode 100644 index 4d6f41c7e..000000000 --- a/report.html +++ /dev/null @@ -1,193 +0,0 @@ - - - - - - -
[The deletion hunk for report.html was garbled in extraction; only "-" markers and the texts "Weekly Report", "Details" and "Suggestions" survive. The entire static mock-up is removed in this commit, leaving the Handlebars views as the report pages.]
          - - - \ No newline at end of file diff --git a/routes/report.js b/routes/report.js index 54e3d4c18..5148a288f 100644 --- a/routes/report.js +++ b/routes/report.js @@ -1,16 +1,9 @@ +var activity = require("../activity.json"); /* * GET report page. */ exports.view = function(req, res){ - res.render('report', { - "activities": [ - {"name": "work", "hours": 9}, - {"name": "school", "hours": 7}, - {"name": "family", "hours": 4}, - {"name": "sleep", "hours": 4}, - {"name": "fun", "hours": 0}, - ], - }); + res.render('report', activity); }; \ No newline at end of file diff --git a/views/add.handlebars b/views/add.handlebars index 11a9d7e0b..34ab9c808 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -9,13 +9,18 @@
[Garbled hunk of views/add.handlebars; only diff markers and the "Add Activity" heading survive. Per the add-activity.css changes above, the title bar is reworked into a back bar and a hamburger bar.]
          @@ -29,11 +34,11 @@
          - +
          - +
          diff --git a/views/report.handlebars b/views/report.handlebars index a641abb62..8787d0bcc 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -12,9 +12,23 @@
[Garbled hunk of views/report.handlebars; only diff markers and the "Weekly Report" heading survive. Per the weekly-report.css changes above, the title bar gains the same back bar and hamburger bar plus a nav bar.]
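PATCH 07 above moves the hard-coded activity list out of routes/report.js into activity.json and loads it with require(). A short sketch of that change follows, plus a hedged alternative; exports.viewFresh is hypothetical and not part of the project.

// routes/report.js after PATCH 07 (as in the diff above):
var activity = require('../activity.json');   // parsed once, at startup

exports.view = function (req, res) {
  res.render('report', activity);              // context is { activities: [...] }
};

// require() caches the parsed JSON, so edits to activity.json made while the
// server is running are not picked up. Re-reading the file on each request,
// as routes/userAction.js later in the series does for users.json, avoids that:
var fs = require('fs');
exports.viewFresh = function (req, res) {      // hypothetical alternative
  var fresh = JSON.parse(fs.readFileSync('activity.json', 'utf8'));
  res.render('report', fresh);
};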
          ",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof 
[This run of minified jQuery continues from the previous line: apparently a vendored jQuery build added by one of the commits missing between PATCH 07/70 and PATCH 14/70, whose diff headers were lost in extraction. A further fragment of the same file precedes the PATCH 14/70 header below; the bulk of the minified source is omitted here.]
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0 - + From 78c6a1020e8cad459c8f3d964bb33e2512feaf9e Mon Sep 17 00:00:00 2001 From: Hackerry Date: Sat, 20 Feb 2021 13:18:09 -0800 Subject: [PATCH 14/70] Add confirmation page and login/signup basic logic --- app.js | 5 + public/css/add-confirmation.css | 20 ++++ public/css/login.css | 95 ++++++++++++++++++ public/css/nav-bar.css | 67 +++++++++++++ public/js/navbar.js | 10 ++ routes/addConfirmation.js | 21 ++++ routes/userAction.js | 50 ++++++++++ users.json | 1 + views/add.handlebars | 14 +-- views/addConfirmation.handlebars | 42 ++++++++ views/login.handlebars | 159 ++++++++++++++++++++++++++----- views/report.handlebars | 14 +-- 12 files changed, 452 insertions(+), 46 deletions(-) create mode 100644 public/css/add-confirmation.css create mode 100644 public/css/login.css create mode 100644 public/css/nav-bar.css create mode 100644 public/js/navbar.js create mode 100644 routes/addConfirmation.js create mode 100644 routes/userAction.js create mode 100644 users.json create mode 100644 views/addConfirmation.handlebars diff --git a/app.js b/app.js index fe80a17cf..f27675d50 100644 --- a/app.js +++ b/app.js @@ -13,6 +13,8 @@ var index = require('./routes/index'); var add = require('./routes/add'); var report = require('./routes/report'); var login = require('./routes/login'); +var addConfirmation = require('./routes/addConfirmation'); +var userAction = require('./routes/userAction'); var app = express(); @@ -41,6 +43,9 @@ app.get('/', index.view); app.get('/add', add.view); app.get('/report', report.view); app.get('/login', login.view); +app.get('/addConfirmation', addConfirmation.view); +app.get('/userSignUp', userAction.signUpAction); +app.get('/userLogIn', userAction.logInAction); http.createServer(app).listen(app.get('port'), function(){ console.log('Express server listening on port ' + app.get('port')); diff --git a/public/css/add-confirmation.css b/public/css/add-confirmation.css new file mode 100644 index 000000000..e3604fa36 --- /dev/null +++ b/public/css/add-confirmation.css @@ -0,0 +1,20 @@ + +* { + padding: 0; + margin: 0; +} + +a { + text-decoration: none; + color: black; +} + +#root-div { + margin: 0 auto; + width: 100%; + text-align: center; +} + +#content-div { + width: 100%; +} \ No newline at end of file diff --git a/public/css/login.css b/public/css/login.css new file mode 100644 index 000000000..46540019d --- /dev/null +++ b/public/css/login.css @@ -0,0 +1,95 @@ +body { + font-family: 'Roboto', sans-serif; +} + +.home-content { + line-height: 25px; +} + +#title-bar { + margin-bottom: 8px; +} + +#mob-title { + margin-top: 50px; + margin-bottom: 10px; +} + +* { + padding: 0; + margin: 0; +} + +a { + text-decoration: none; + color: black; +} + +#root-div { + width: 100%; + margin: 0 auto; +} + +#content-div { + width: 100%; + text-align: center; + margin: 0 auto; +} + +#login-div { + width: 80%; + margin: 10px auto; +} + +#login-select { + width: 100%; +} + +#login-button, #signup-button { + display: inline-block; + width: 40%; + padding: 5px; +} + +#login-button:hover, #signup-button:hover { + cursor: pointer; +} + +#login-title { + margin: 15px; +} + +.login-entry { + width: 100%; + margin: 5px; +} + 
+.login-label { + width: 25%; + display: inline-block; + text-align: right; + font-weight: bold; +} + +#login-submit { + border: 2px solid black; + border-radius: 5px; + display: inline-block; + padding: 2px; + margin: 40px; + width: 60%; +} + +#retype-div { + visibility: hidden; +} + +#login-submit:hover { + cursor: pointer; + background-color: lightgray; +} + +.login-option-select { + background-color: black; + color: white; +} diff --git a/public/css/nav-bar.css b/public/css/nav-bar.css new file mode 100644 index 000000000..79fcc358d --- /dev/null +++ b/public/css/nav-bar.css @@ -0,0 +1,67 @@ +#title-bar { + font-size: 2em; + margin-top: 10px; +} + +#back-bar, #hamburger-bar { + width: 48%; + display: inline-block; + box-sizing: content-box; +} + +#back-bar { + text-align: left; +} + +#back-arrow { + padding-left: 20px; +} + +#hamburger-bar { + text-align: right; +} + +#hamburger-button { + padding-right: 20px; +} + +#back-arrow:hover { + cursor: pointer; +} + +#hamburger-button:hover { + cursor: pointer; +} + +#nav-bar { + position: absolute; + right: 0; + padding-right: 10%; + background-color: black; + color: white; + height: 100%; + top: 0; + text-align: left; + padding-top: 100px; +} + +#nav-bar-list { + list-style: none; +} + +#nav-bar-list li { + padding-left: 10px; + line-height: 2em; +} + +#nav-bar-list li a { + text-decoration: none; + color: white; +} + +#nav-close { + position: absolute; + top: 10px; + right: 10px; + font-size: 1.2em; +} \ No newline at end of file diff --git a/public/js/navbar.js b/public/js/navbar.js new file mode 100644 index 000000000..699a31463 --- /dev/null +++ b/public/js/navbar.js @@ -0,0 +1,10 @@ +// Nav bar js +var navBar = document.getElementById("nav-bar"); +var hamburgerButton = document.getElementById("hamburger-button"); +hamburgerButton.onclick = function(e) { + navBar.style.display = 'block'; +} +var navClose = document.getElementById("nav-close"); +navClose.onclick = function(e) { + navBar.style.display = 'none'; +} \ No newline at end of file diff --git a/routes/addConfirmation.js b/routes/addConfirmation.js new file mode 100644 index 000000000..92acdb48c --- /dev/null +++ b/routes/addConfirmation.js @@ -0,0 +1,21 @@ + +/* + * GET addConfirmation page. 
+ */ + +exports.view = function(request, res){ + var activityName = request.query.activityName; + var type = request.query.type; + var startTime = request.query.startTime; + var endTime = request.query.endTime; + console.log(activityName, type, startTime, endTime); + + var newData = { + "activityName": activityName, + "type": type, + "startTime": startTime, + "endTime": endTime, + }; + + res.render('addConfirmation', newData); +}; \ No newline at end of file diff --git a/routes/userAction.js b/routes/userAction.js new file mode 100644 index 000000000..c580c8ab1 --- /dev/null +++ b/routes/userAction.js @@ -0,0 +1,50 @@ +'use strict'; + +const userDatabaseFile = 'users.json'; +const fs = require('fs'); + +exports.signUpAction = function(req, res){ + var username = req.query.username; + var password = req.query.password; + console.log(username, password); + + // Read database + var userDatabase = JSON.parse(fs.readFileSync(userDatabaseFile, 'utf8')); + + // Check duplicate username + if(userDatabase.hasOwnProperty(username)) { + res.json({ + "success": false, + "reason": "Username already taken", + }); + } else { + userDatabase[username] = password; + console.log(userDatabase); + var data = JSON.stringify(userDatabase); + fs.writeFileSync(userDatabaseFile, data, 'utf8'); + res.json({ + "success": true, + }); + } +}; + +exports.logInAction = function(req, res){ + var username = req.query.username; + var password = req.query.password; + console.log("Username:", username, "Password:", password); + + // Read database + var userDatabase = JSON.parse(fs.readFileSync(userDatabaseFile, 'utf8')); + + // Check username exists + if(userDatabase.hasOwnProperty(username) && userDatabase[username] === password) { + res.json({ + "success": true, + }); + } else { + res.json({ + "success": false, + "reason": "Username or password mismatch", + }); + } +}; \ No newline at end of file diff --git a/users.json b/users.json new file mode 100644 index 000000000..756e0b308 --- /dev/null +++ b/users.json @@ -0,0 +1 @@ +{"bob":"xxx","harry":"123","alice":"12345"} \ No newline at end of file diff --git a/views/add.handlebars b/views/add.handlebars index 15f493885..aab3f1143 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -2,6 +2,7 @@ + @@ -60,16 +61,5 @@
          - + \ No newline at end of file diff --git a/views/addConfirmation.handlebars b/views/addConfirmation.handlebars new file mode 100644 index 000000000..50ab8591c --- /dev/null +++ b/views/addConfirmation.handlebars @@ -0,0 +1,42 @@ + + + + + + + + + + +
[The body of the new views/addConfirmation.handlebars was garbled in extraction. Surviving content: an "Activity Created!" heading followed by the template fields {{activityName}}, {{type}}, {{startTime}} and {{endTime}}, the values passed in by routes/addConfirmation.js above.]
          + + + + \ No newline at end of file diff --git a/views/login.handlebars b/views/login.handlebars index 9d9c3593a..abdda983e 100644 --- a/views/login.handlebars +++ b/views/login.handlebars @@ -1,28 +1,141 @@ - - Test Page - - - - - - - -
[This diff of views/login.handlebars was garbled in extraction. The old placeholder body (a "MobTrack" heading and "Temporary blank for now.") is replaced by a login screen with "Sign Up" / "Log In" controls and a "Log In" form matching the selectors in public/css/login.css above, plus an inline client script (content lost) that presumably drives the /userSignUp and /userLogIn requests.]
          - + + + diff --git a/views/report.handlebars b/views/report.handlebars index 77c5f6ae0..d9fcf5484 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -2,6 +2,7 @@ + @@ -71,19 +72,10 @@
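The inline client script of login.handlebars was lost in extraction above. A minimal browser-side sketch of the round trip it presumably performs against the /userSignUp and /userLogIn routes defined in routes/userAction.js earlier in this patch; the use of fetch() is an assumption, the original may have used jQuery or a plain XMLHttpRequest.

// Hypothetical client-side call matching the server's query-string API:
function logIn(username, password) {
  var url = '/userLogIn?username=' + encodeURIComponent(username) +
            '&password=' + encodeURIComponent(password);
  return fetch(url)
    .then(function (res) { return res.json(); })
    .then(function (result) {
      // Server replies { success: true } or { success: false, reason: "..." }.
      if (!result.success) { alert(result.reason); }
      return result.success;
    });
}

As the diffs show, the password travels in the query string and users.json stores it in plain text, which is acceptable for a course wireframe but worth flagging before anything ships.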
          + + + * | + * | + * | + * | + * | + * + * USAGE: + * + * - See <$(ele).ptTimeSelect()> + * + * + * + * LAST UPDATED: + * + * - $Date: 2012/08/05 19:40:21 $ + * - $Author: paulinho4u $ + * - $Revision: 1.8 $ + * + */ + +(function($){ + + /** + * jQuery definition + * + * @see http://jquery.com/ + * @name jQuery + * @class jQuery Library + */ + + /** + * jQuery 'fn' definition to anchor all public plugin methods. + * + * @see http://jquery.com/ + * @name fn + * @class jQuery Library public method anchor + * @memberOf jQuery + */ + + /** + * Namespace for all properties and methods + * + * @namespace ptTimeSelect + * @memberOf jQuery + */ + jQuery.ptTimeSelect = {}; + jQuery.ptTimeSelect.version = "__BUILD_VERSION_NUMBER__"; + + /** + * The default options for all calls to ptTimeSelect. Can be + * overwriten with each individual call to {@link jQuery.fn.ptTimeSelect} + * + * @type {Object} options + * @memberOf jQuery.ptTimeSelect + * @see jQuery.fn.ptTimeSelect + */ + jQuery.ptTimeSelect.options = { + containerClass: undefined, + containerWidth: '22em', + hoursLabel: 'Hour', + minutesLabel: 'Minutes', + setButtonLabel: 'Set', + popupImage: undefined, + onFocusDisplay: true, + zIndex: 10, + onBeforeShow: undefined, + onClose: undefined + }; + + /** + * Internal method. Called when page is initialized to add the time + * selection area to the DOM. + * + * @private + * @memberOf jQuery.ptTimeSelect + * @return {undefined} + */ + jQuery.ptTimeSelect._ptTimeSelectInit = function () { + jQuery(document).ready( + function () { + //if the html is not yet created in the document, then do it now + if (!jQuery('#ptTimeSelectCntr').length) { + jQuery("body").append( + '
[The HTML strings appended to the page body by the plugin were garbled in extraction. The surviving fragments describe the picker popup: a close control, a preview of the selected time ("1 : 00 AM"), an "Hour" column with links 1 through 12, a "Minutes" column with links 00 through 55 in five-minute steps, AM and PM links, and a "SET" button.]
          ' + ); + + var e = jQuery('#ptTimeSelectCntr'); + + // Add the events to the functions + e.find('.ptTimeSelectMin') + .bind("click", function(){ + jQuery.ptTimeSelect.setMin($(this).text()); + }); + + e.find('.ptTimeSelectHr') + .bind("click", function(){ + jQuery.ptTimeSelect.setHr($(this).text()); + }); + + $(document).mousedown(jQuery.ptTimeSelect._doCheckMouseClick); + }//end if + } + ); + }();// jQuery.ptTimeSelectInit() + + + /** + * Sets the hour selected by the user on the popup. + * + * @private + * @param {Integer} h - Interger indicating the hour. This value + * is the same as the text value displayed on the + * popup under the hour. This value can also be the + * words AM or PM. + * @return {undefined} + * + */ + jQuery.ptTimeSelect.setHr = function(h) { + if ( h.toLowerCase() == "am" + || h.toLowerCase() == "pm" + ) { + jQuery('#ptTimeSelectUserSelAmPm').empty().append(h); + } else { + jQuery('#ptTimeSelectUserSelHr').empty().append(h); + } + };// END setHr() function + + /** + * Sets the minutes selected by the user on the popup. + * + * @private + * @param {Integer} m - interger indicating the minutes. This + * value is the same as the text value displayed on the popup + * under the minutes. + * @return {undefined} + */ + jQuery.ptTimeSelect.setMin = function(m) { + jQuery('#ptTimeSelectUserSelMin').empty().append(m); + };// END setMin() function + + /** + * Takes the time defined by the user and sets it to the input + * element that the popup is currently opened for. + * + * @private + * @return {undefined} + */ + jQuery.ptTimeSelect.setTime = function() { + var tSel = jQuery('#ptTimeSelectUserSelHr').text() + + ":" + + jQuery('#ptTimeSelectUserSelMin').text() + + " " + + jQuery('#ptTimeSelectUserSelAmPm').text(); + jQuery(".isPtTimeSelectActive").val(tSel); + this.closeCntr(); + + };// END setTime() function + + /** + * Displays the time definition area on the page, right below + * the input field. Also sets the custom colors/css on the + * displayed area to what ever the input element options were + * set with. + * + * @private + * @param {String} uId - Id of the element for whom the area will + * be displayed. This ID was created when the + * ptTimeSelect() method was called. 
+ * @return {undefined} + * + */ + jQuery.ptTimeSelect.openCntr = function (ele) { + jQuery.ptTimeSelect.closeCntr(); + jQuery(".isPtTimeSelectActive").removeClass("isPtTimeSelectActive"); + var cntr = jQuery("#ptTimeSelectCntr"); + var i = jQuery(ele).eq(0).addClass("isPtTimeSelectActive"); + var opt = i.data("ptTimeSelectOptions"); + var style = i.offset(); + style['z-index'] = opt.zIndex; + style.top = (style.top + i.outerHeight()); + if (opt.containerWidth) { + style.width = opt.containerWidth; + } + if (opt.containerClass) { + cntr.addClass(opt.containerClass); + } + cntr.css(style); + var hr = 1; + var min = '00'; + var tm = 'AM'; + if (i.val()) { + var re = /([0-9]{1,2}).*:.*([0-9]{2}).*(PM|AM)/i; + var match = re.exec(i.val()); + if (match) { + hr = match[1] || 1; + min = match[2] || '00'; + tm = match[3] || 'AM'; + } + } + cntr.find("#ptTimeSelectUserSelHr").empty().append(hr); + cntr.find("#ptTimeSelectUserSelMin").empty().append(min); + cntr.find("#ptTimeSelectUserSelAmPm").empty().append(tm); + cntr.find(".ptTimeSelectTimeLabelsCntr .ptTimeSelectLeftPane") + .empty().append(opt.hoursLabel); + cntr.find(".ptTimeSelectTimeLabelsCntr .ptTimeSelectRightPane") + .empty().append(opt.minutesLabel); + cntr.find("#ptTimeSelectSetButton a").empty().append(opt.setButtonLabel); + if (opt.onBeforeShow) { + opt.onBeforeShow(i, cntr); + } + cntr.slideDown("fast"); + + };// END openCntr() + + /** + * Closes (hides it) the popup container. + * @private + * @param {Object} i - Optional. The input field for which the + * container is being closed. + * @return {undefined} + */ + jQuery.ptTimeSelect.closeCntr = function(i) { + var e = $("#ptTimeSelectCntr"); + if (e.is(":visible") == true) { + + // If IE, then check to make sure it is realy visible + if (jQuery.support.tbody == false) { + if (!(e[0].offsetWidth > 0) && !(e[0].offsetHeight > 0) ) { + return; + } + } + + jQuery('#ptTimeSelectCntr') + .css("display", "none") + .removeClass() + .css("width", ""); + if (!i) { + i = $(".isPtTimeSelectActive"); + } + if (i) { + var opt = i.removeClass("isPtTimeSelectActive") + .data("ptTimeSelectOptions"); + if (opt && opt.onClose) { + opt.onClose(i); + } + } + } + return; + };//end closeCntr() + + /** + * Closes the timePicker popup if user is not longer focused on the + * input field or the timepicker + * + * @private + * @param {jQueryEvent} ev - Event passed in by jQuery + * @return {undefined} + */ + jQuery.ptTimeSelect._doCheckMouseClick = function(ev){ + if (!$("#ptTimeSelectCntr:visible").length) { + return; + } + if ( !jQuery(ev.target).closest("#ptTimeSelectCntr").length + && jQuery(ev.target).not("input.isPtTimeSelectActive").length ){ + jQuery.ptTimeSelect.closeCntr(); + } + + };// jQuery.ptTimeSelect._doCheckMouseClick + + /** + * FUNCTION: $().ptTimeSelect() + * Attaches a ptTimeSelect widget to each matched element. Matched + * elements must be input fields that accept a values (input field). + * Each element, when focused upon, will display a time selection + * popoup where the user can define a time. + * + * @memberOf jQuery + * + * PARAMS: + * + * @param {Object} [opt] - An object with the options for the time selection widget. + * + * @param {String} [opt.containerClass=""] - A class to be associated with the popup widget. + * + * @param {String} [opt.containerWidth=""] - Css width for the container. + * + * @param {String} [opt.hoursLabel="Hours"] - Label for the Hours. + * + * @param {String} [opt.minutesLabel="Minutes"] - Label for the Mintues container. 
+ * + * @param {String} [opt.setButtonLabel="Set"] - Label for the Set button. + * + * @param {String} [opt.popupImage=""] - The html element (ex. img or text) to be appended next to each + * input field and that will display the time select widget upon + * click. + * + * @param {Integer} [opt.zIndex=10] - Integer for the popup widget z-index. + * + * @param {Function} [opt.onBeforeShow=undefined] - Function to be called before the widget is made visible to the + * user. Function is passed 2 arguments: 1) the input field as a + * jquery object and 2) the popup widget as a jquery object. + * + * @param {Function} [opt.onClose=undefined] - Function to be called after closing the popup widget. Function + * is passed 1 argument: the input field as a jquery object. + * + * @param {Bollean} [opt.onFocusDisplay=true] - True or False indicating if popup is auto displayed upon focus + * of the input field. + * + * + * RETURN: + * @return {jQuery} selection + * + * + * + * EXAMPLE: + * @example + * $('#fooTime').ptTimeSelect(); + * + */ + jQuery.fn.ptTimeSelect = function (opt) { + return this.each(function(){ + if(this.nodeName.toLowerCase() != 'input') return; + var e = jQuery(this); + if (e.hasClass('hasPtTimeSelect')){ + return this; + } + var thisOpt = {}; + thisOpt = $.extend(thisOpt, jQuery.ptTimeSelect.options, opt); + e.addClass('hasPtTimeSelect').data("ptTimeSelectOptions", thisOpt); + + //Wrap the input field in a
          element with + // a unique id for later referencing. + if (thisOpt.popupImage || !thisOpt.onFocusDisplay) { + var img = jQuery(' ' + + thisOpt.popupImage + '' + ) + .data("ptTimeSelectEle", e); + e.after(img); + } + if (thisOpt.onFocusDisplay){ + e.focus(function(){ + jQuery.ptTimeSelect.openCntr(this); + }); + } + return this; + }); + };// End of jQuery.fn.ptTimeSelect + +})(jQuery); diff --git a/routes/add.js b/routes/add.js index f9d37f6d4..819990def 100644 --- a/routes/add.js +++ b/routes/add.js @@ -1,8 +1,20 @@ +const activityTypes = require('../activityTypes.json'); /* * GET add page. */ exports.view = function(req, res){ - res.render('add'); + var data = {}; + var array = []; + var types = activityTypes["types"]; + for(var i = 0; i < types.length; i++) { + var activityType = types[i]; + array.push({ + "type": activityType["name"], + }); + } + data['options'] = array; + console.log("Read Types:", data); + res.render('add', data); }; \ No newline at end of file diff --git a/routes/addConfirmation.js b/routes/addConfirmation.js index 92acdb48c..7ca901e09 100644 --- a/routes/addConfirmation.js +++ b/routes/addConfirmation.js @@ -1,7 +1,8 @@ +'use strict'; -/* - * GET addConfirmation page. - */ +const activityDatabaseFile = 'activities.json'; +const activityTypes = require('../activityTypes.json'); +const fs = require('fs'); exports.view = function(request, res){ var activityName = request.query.activityName; @@ -11,11 +12,112 @@ exports.view = function(request, res){ console.log(activityName, type, startTime, endTime); var newData = { + "error": false, "activityName": activityName, "type": type, "startTime": startTime, "endTime": endTime, }; - res.render('addConfirmation', newData); -}; \ No newline at end of file + // Write to activities file + var response = recordActivity(activityName, type, startTime, endTime); + if(response === '') { + res.render('addConfirmation', newData); + } else { + res.render('addConfirmation', { + "error": true, + "reason": response, + }); + } +}; + +function recordActivity(name, type, startTime, endTime) { + // Sanity checks + + // name can't be empty + if(name.trim() === '') { + return 'Activity name can\'t be empty'; + } + + // type must be defined + var types = activityTypes["types"]; + var validType = false; + for(var i = 0; i < types.length; i++) { + if(types[i]["name"] === type) { + validType = true; + break; + } + } + if(!validType) { + return 'Activity type not valid'; + } + + // start & end time check + if(startTime === '' && endTime === '') { + return 'Time can\'t be empty'; + } else if(startTime === endTime) { + return 'Start and end time are the same'; + } + + var parts = startTime.split(/[: ]/); + if(parts.length != 3) return 'Start time ill-formatted' + var startHour = parseInt(parts[0]); + var startMin = parseInt(parts[1]); + var startDay = parts[2]; + parts = endTime.split(/[: ]/); + if(parts.length != 3) return 'End time ill-formatted' + var endHour = parseInt(parts[0]); + var endMin = parseInt(parts[1]); + var endDay = parts[2]; + + // Calculate duration + var seconds; + if(startHour !== 12 && startDay === 'PM') startHour += 12; + if(endHour !== 12 && endDay === 'PM') endHour += 12; + if(startHour === 12 && startDay === 'AM') startHour = 0; + if(endHour === 12 && endDay === 'AM') endHour = 0; + if(startHour > endHour || (startHour === endHour && startMin > endMin)) { + var hms = startHour + ":" + startMin + ":00"; + var startT = new Date("1970-01-01 " + hms); + hms = endHour + ":" + endMin + ":00"; + var endT = new 
Date("1970-01-02 " + hms); + seconds = (endT - startT) / 1000; + } else { + var hms = startHour + ":" + startMin + ":00"; + var startT = new Date("1970-01-01 " + hms); + hms = endHour + ":" + endMin + ":00"; + var endT = new Date("1970-01-01 " + hms); + seconds = (endT - startT) / 1000; + } + + // Store information + var activityDatabase = JSON.parse(fs.readFileSync(activityDatabaseFile, 'utf8')); + + // TODO store by username + var date = new Date(); + var key = date.getFullYear() + "-" + date.getMonth() + "-" + date.getDate(); + // console.log("Store to key:", key); + var data = { + "name": name, + "type": type, + "start": startTime, + "end": endTime, + "duration": seconds, + }; + + if(!activityDatabase.hasOwnProperty(key)) { + activityDatabase[key] = [data]; + } else { + var activities = activityDatabase[key]; + activities.push(data); + activityDatabase[key] = activities; + } + + console.log("Updated result:", activityDatabase); + + // Write to file + var data = JSON.stringify(activityDatabase); + fs.writeFileSync(activityDatabaseFile, data, 'utf8'); + + return ''; +} \ No newline at end of file diff --git a/routes/index.js b/routes/index.js index cfb776a2e..00ae2bd74 100644 --- a/routes/index.js +++ b/routes/index.js @@ -1,8 +1,29 @@ +const activityDatabaseFile = "activities.json"; +const fs = require("fs"); + +const day = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']; /* * GET home page. */ exports.view = function(req, res){ - res.render('index'); + var activityDatabase = JSON.parse(fs.readFileSync(activityDatabaseFile, 'utf8')); + + // TODO store by username + var date = new Date(); + var key = date.getFullYear() + "-" + date.getMonth() + "-" + date.getDate(); + var count = 0; + if(activityDatabase.hasOwnProperty(key)) { + count = activityDatabase[key].length; + } + var dateStr = (date.getMonth() + 1) + "/" + date.getDate() + "/" + date.getFullYear(); + var data = { + "noActivity": (count == 0), + "singleActivity": (count == 1), + "activitiesCount": count, + "date": dateStr, + "day": day[date.getDay()], + }; + res.render('index', data); }; \ No newline at end of file diff --git a/routes/report.js b/routes/report.js index 5148a288f..02480f8c7 100644 --- a/routes/report.js +++ b/routes/report.js @@ -1,9 +1,52 @@ -var activity = require("../activity.json"); +const activitiesFile = "activities.json"; +const activityTypesFile = "activityTypes.json"; +const fs = require("fs"); /* * GET report page. 
*/ exports.view = function(req, res){ - res.render('report', activity); + var date = new Date(); + var key = date.getFullYear() + "-" + date.getMonth() + "-" + date.getDate(); + + var activities = JSON.parse(fs.readFileSync(activitiesFile, 'utf8')); + var activityTypes = JSON.parse(fs.readFileSync(activityTypesFile, 'utf8')); + + // Intialize all types and fill in value + var data = {}; + var typeData = []; + for(var i = 0; i < activityTypes["types"].length; i++) { + var type = activityTypes["types"][i]; + type["count"] = 0; + typeData.push(type); + } + console.log("Types gathered:", typeData); + + if(!activities.hasOwnProperty(key)){ + data['hasData'] = false; + } else { + data['hasData'] = true; + + // Generate daily report (default) + // -> TODO generate weekly/monthly report + var dailyActivities = activities[key]; + for(var i = 0; i < dailyActivities.length; i++) { + var activity = dailyActivities[i]; + for(var j = 0; j < typeData.length; j++) { + if(typeData[j]["name"] === activity["type"]) + typeData[j]["count"] += activity["duration"]; + } + } + + // Remove types that have 0 hours + typeData = typeData.filter(function(i) { + return i["count"] !== 0 + }) + data['data'] = typeData; + + console.log("Data gathered:", data); + } + + res.render('report', data); }; \ No newline at end of file diff --git a/views/add.handlebars b/views/add.handlebars index 20517a32f..03dbbe16e 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -1,8 +1,11 @@ + + + @@ -38,28 +41,88 @@
          - +
          - + +

          +1 day

          +
          +

          +
          + + \ No newline at end of file diff --git a/views/addConfirmation.handlebars b/views/addConfirmation.handlebars index 50ab8591c..6d78a216e 100644 --- a/views/addConfirmation.handlebars +++ b/views/addConfirmation.handlebars @@ -10,15 +10,6 @@
          -
          -
          - -
          -
          - -
          -
          - + {{#if error}} +

          Something went wrong...

          + {{else}}

          Activity Created!

          -

          {{activityName}}

          -

          {{type}}

          -

          {{startTime}}

          -

          {{endTime}}

          + {{/if}} +
          + {{#if error}} +
          +

          Error:

          +

          {{reason}}

          +
          + {{else}} +
          +

          Activity Name:

          +

          {{activityName}}

          +
          +
          +

          Type:

          +

          {{type}}

          +
          +
          +

          Start Time:

          +

          {{startTime}}

          +
          +
          +

          End Time:

          +

          {{endTime}}

          +
          + {{/if}} + {{#if error}} +
          Back
          + {{else}} +
          Done
          + {{/if}} +
          - - \ No newline at end of file diff --git a/views/index.handlebars b/views/index.handlebars index f69129595..2ff592854 100644 --- a/views/index.handlebars +++ b/views/index.handlebars @@ -13,9 +13,17 @@

          MobTrack

          -

          Today is Thursday

          -

          02/18/21

          -

          There are 3 activities added so far.

          +

          Today is {{day}}

          +

          {{date}}

          + {{#if singleActivity}} +

          There is {{activitiesCount}} activity added so far.

          + {{else}} + {{#if noActivity}} +

          No activities added.

          + {{else}} +

          There are {{activitiesCount}} activities added so far.

          + {{/if}} + {{/if}}
          diff --git a/views/report.handlebars b/views/report.handlebars index d9fcf5484..6fb4868e3 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -44,7 +44,7 @@
          - + {{#if hasData}} @@ -68,6 +68,14 @@

          + {{else}} +
          +

          There's no data yet!

          +
          + Add Activity +
          +
          + {{/if}}
          @@ -78,32 +86,16 @@ + + + \ No newline at end of file diff --git a/views/home.handlebars b/views/home.handlebars new file mode 100644 index 000000000..636e1545c --- /dev/null +++ b/views/home.handlebars @@ -0,0 +1,57 @@ + + + Test Page + + + + + + + +
          +
          +
          +
          +

          MobTrack

          +

          Hi!

          +

          Today is {{day}}

          +

          {{date}}

          + {{#if singleActivity}} +

          There is {{activitiesCount}} activity added so far.

          + {{else}} + {{#if noActivity}} +

          No activities added.

          + {{else}} +

          There are {{activitiesCount}} activities added so far.

          + {{/if}} + {{/if}} +
          +
          + +
          +
          + + + + + diff --git a/views/index.handlebars b/views/index.handlebars index 2ff592854..73ed77899 100644 --- a/views/index.handlebars +++ b/views/index.handlebars @@ -1,45 +1,146 @@ - - Test Page - - - - - - - -
          -
          -
          -
          -

          MobTrack

          -

          Today is {{day}}

          -

          {{date}}

          - {{#if singleActivity}} -

          There is {{activitiesCount}} activity added so far.

          - {{else}} - {{#if noActivity}} -

          No activities added.

          - {{else}} -

          There are {{activitiesCount}} activities added so far.

          - {{/if}} - {{/if}} + + Test Page + + + + + + + + +
          +
          +
          +
          +

          MobTrack

          +

          Temporary blank for now.

          +
          +
          +
          +
          + +
          Sign Up
          +
          +

          Log In

          + + + +
          Log In
          -
          -
          -
          - + + + diff --git a/views/login.handlebars b/views/login.handlebars deleted file mode 100644 index abdda983e..000000000 --- a/views/login.handlebars +++ /dev/null @@ -1,141 +0,0 @@ - - - Test Page - - - - - - - - -
          -
          -
          -
          -

          MobTrack

          -

          Temporary blank for now.

          -
          -
          -
          -
          - -
          Sign Up
          -
          -

          Log In

          - - - -
          Log In
          -
          -
          -
          - - - - diff --git a/views/report.handlebars b/views/report.handlebars index 6fb4868e3..658ac945e 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -28,7 +28,7 @@
        • Home
        • Add Activity
        • Report
        • -
        • Log Out
        • +
        • Log Out
        • @@ -81,6 +81,16 @@ + + @@ -73,8 +81,6 @@ if(username === '') { // Not logged in window.location.href = '/'; - } else { - document.getElementById('username').innerText = username; } @@ -113,25 +119,31 @@ // on click submit, check error $('#form-submit').click((e) => { - var error = false; + e.preventDefault(); + if($('#endTimeInput').val().trim() === '') { $('#error-text').text("Please select end time"); $('#error-text').css("visibility", "visible"); - error = true; } if($('#startTimeInput').val().trim() === '') { $('#error-text').text("Please select start time"); $('#error-text').css("visibility", "visible"); - error = true; } if($('#activityNameInput').val().trim() === '') { $('#error-text').text("Activity name can't be empty"); $('#error-text').css("visibility", "visible"); - error = true; - } - if(error) { - e.preventDefault(); } + + // Show confirmation + $('#add-confirmation-top').css('visibility', 'visible'); + }); + + // Register confirmation buttons + $('#add-confirmation-submit').click((e) => { + $("#activity-form").submit(); + }); + $('#add-confirmation-cancel').click((e) => { + $('#add-confirmation-top').css('visibility', 'hidden'); }); }); diff --git a/views/addConfirmation.handlebars b/views/addConfirmation.handlebars index bfe023cd6..4e09c4ab4 100644 --- a/views/addConfirmation.handlebars +++ b/views/addConfirmation.handlebars @@ -10,16 +10,6 @@
          - - {{#if error}}

          Something went wrong...

          {{else}} @@ -65,8 +55,6 @@ if(username === '') { // Not logged in window.location.href = '/'; - } else { - document.getElementById('username').innerText = username; } \ No newline at end of file diff --git a/views/home.handlebars b/views/home.handlebars index 636e1545c..66bb8be70 100644 --- a/views/home.handlebars +++ b/views/home.handlebars @@ -17,12 +17,12 @@

          Today is {{day}}

          {{date}}

          {{#if singleActivity}} -

          There is {{activitiesCount}} activity added so far.

          +

          There is {{activitiesCount}} activity added so far today.

          {{else}} {{#if noActivity}} -

          No activities added.

          +

          No activities added today.

          {{else}} -

          There are {{activitiesCount}} activities added so far.

          +

          There are {{activitiesCount}} activities added so far today.

          {{/if}} {{/if}}
          @@ -50,8 +50,7 @@ if(username === '') { // Not logged in window.location.href = '/'; - } else { - document.getElementById('username').innerText = username; } + document.getElementById('username').innerText = username; diff --git a/views/index.handlebars b/views/index.handlebars index 04acf7619..782e5baa6 100644 --- a/views/index.handlebars +++ b/views/index.handlebars @@ -30,12 +30,10 @@
          Log In
          diff --git a/views/report.handlebars b/views/report.handlebars index 71062c84b..d0148ddb1 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -35,47 +35,46 @@

          Weekly Report

          - {{#if hasData}} - - + +
          +

          Report starts on Sunday

          + + - -
            + +
              -
              - -
              - - -
              - Details -
              -
              -
              - Suggestions +
              + +
              + + +
              + Details +
              +
              +
              + Suggestions +
              +
              -
              - {{else}} + +
              -

              There's no data yet!

              -
              - Add Activity -
              +

              There's no data!

              - {{/if}}
              @@ -87,8 +86,6 @@ if(username === '') { // Not logged in window.location.href = '/'; - } else { - document.getElementById('username').innerText = username; } @@ -96,29 +93,43 @@ \ No newline at end of file diff --git a/views/settings.handlebars b/views/settings.handlebars new file mode 100644 index 000000000..f65ef6b09 --- /dev/null +++ b/views/settings.handlebars @@ -0,0 +1,104 @@ + + + + + + + + + + + +
              +
              + +
              +
              + +
              +
              + + +

              Settings

              +
              + + +
              + +
              +
              Save
              +
              + +

              Setting saved!

              +
              +
              + + + + + + \ No newline at end of file diff --git a/views/suggestions.handlebars b/views/suggestions.handlebars new file mode 100644 index 000000000..9ec706b0d --- /dev/null +++ b/views/suggestions.handlebars @@ -0,0 +1,60 @@ + + + + + + + + + + + + + +
              +
              +
              +
              + +
              +
              + +
              +
              + + + +

              Suggestions

              +
              + {{#if hasData}} +
                + {{#each suggestions}} +
              • {{this}}
              • + {{/each}} +
              + {{else}} +

              No suggestions

              + {{/if}} +
              +
              +
              + + + + + + \ No newline at end of file From 36e0b7a241503a36a3e05c558907f7188d0d79d5 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Wed, 24 Feb 2021 16:13:10 -0800 Subject: [PATCH 26/70] Update activities.json --- activities.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/activities.json b/activities.json index 295204f42..c149451e9 100644 --- a/activities.json +++ b/activities.json @@ -1 +1 @@ -{"2021-1-21":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:10 PM","duration":22200},{"name":"Lunch","type":"Family","start":"12:30 PM","end":"1:30 PM","duration":3600},{"name":"Game","type":"Fun","start":"8:00 PM","end":"12:00 AM","duration":14400},{"name":"COGS 120","type":"School","start":"4:05 AM","end":"11:00 AM","duration":24900},{"name":"COGS 120","type":"School","start":"1:00 AM","end":"11:00 AM","duration":36000}],"2021-1-23":[{"name":"COGS 120","type":"School","start":"8:00 AM","end":"9:00 AM","duration":3600},{"name":"COGS 102B","type":"School","start":"3:30 PM","end":"5:00 PM","duration":5400},{"name":"COGS 120","type":"School","start":"4:00 PM","end":"7:00 PM","duration":10800},{"name":"COGS 120","type":"School","start":"3:00 PM","end":"7:00 PM","duration":14400}]} \ No newline at end of file +{"2021-1-21":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:10 PM","duration":22200},{"name":"Lunch","type":"Family","start":"12:30 PM","end":"1:30 PM","duration":3600},{"name":"Game","type":"Fun","start":"8:00 PM","end":"12:00 AM","duration":14400},{"name":"COGS 120","type":"School","start":"4:05 AM","end":"11:00 AM","duration":24900},{"name":"COGS 120","type":"School","start":"1:00 AM","end":"11:00 AM","duration":36000}],"2021-1-23":[{"name":"COGS 120","type":"School","start":"8:00 AM","end":"9:00 AM","duration":3600},{"name":"COGS 102B","type":"School","start":"3:30 PM","end":"5:00 PM","duration":5400},{"name":"COGS 120","type":"School","start":"4:00 PM","end":"7:00 PM","duration":10800},{"name":"COGS 120","type":"School","start":"3:00 PM","end":"7:00 PM","duration":14400}],"2021-1-24":[{"name":"COGS 120","type":"Fun","start":"4:00 AM","end":"12:00 AM","duration":72000}]} \ No newline at end of file From ba7b5b01f426d8d687db9f293d13c3ed037f71fb Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Wed, 24 Feb 2021 21:24:04 -0800 Subject: [PATCH 27/70] Update users.json --- users.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/users.json b/users.json index 23265688f..9a2d1b9e5 100644 --- a/users.json +++ b/users.json @@ -1 +1 @@ -{"harry":{"password":"123","settings":{"startDay":"1"}}} +{"123":{"password":"123","settings":{"startDay":"1"}},"harry":{"password":"123","settings":{"startDay":"1"}},"cogs120":{"password":"120123","settings":{"startDay":"1"}}} \ No newline at end of file From 516c9e22a93c35fcf287dc83e2042fdbf08c8bfb Mon Sep 17 00:00:00 2001 From: Hackerry Date: Wed, 24 Feb 2021 22:12:10 -0800 Subject: [PATCH 28/70] Added external sources --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index 994ece49e..7e20c6084 100644 --- a/README.md +++ b/README.md @@ -2,3 +2,11 @@ repo ==== Project Description + +External Sources Used: +1. W3Schools Tutorials: https://www.w3schools.com/ +2. MDN Documentation Example JS code: https://developer.mozilla.org/en-US/docs/Web/JavaScript +3. Jquery Source code and Example code: https://jquery.com/download/ +4. jQuery.ptTimeSelect widget: http://pttimeselect.sourceforge.net/example/index.html +5. 
Past lab code: https://ixd.ucsd.edu/home/w21/index.php#calendar +6. Google Fonts: https://fonts.google.com/ \ No newline at end of file From dd6d31ea89768c48c2bf645663f6528db58306a3 Mon Sep 17 00:00:00 2001 From: Hackerry Date: Wed, 24 Feb 2021 22:13:17 -0800 Subject: [PATCH 29/70] Update activities.json to use new format --- activities.json | 1 - 1 file changed, 1 deletion(-) diff --git a/activities.json b/activities.json index 176ea069c..00fcdb931 100644 --- a/activities.json +++ b/activities.json @@ -1,2 +1 @@ -{"2021-1-21":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:10 PM","duration":22200},{"name":"Lunch","type":"Family","start":"12:30 PM","end":"1:30 PM","duration":3600},{"name":"Game","type":"Fun","start":"8:00 PM","end":"12:00 AM","duration":14400},{"name":"COGS 120","type":"School","start":"4:05 AM","end":"11:00 AM","duration":24900},{"name":"COGS 120","type":"School","start":"1:00 AM","end":"11:00 AM","duration":36000}],"2021-1-23":[{"name":"COGS 120","type":"School","start":"8:00 AM","end":"9:00 AM","duration":3600},{"name":"COGS 102B","type":"School","start":"3:30 PM","end":"5:00 PM","duration":5400},{"name":"COGS 120","type":"School","start":"4:00 PM","end":"7:00 PM","duration":10800},{"name":"COGS 120","type":"School","start":"3:00 PM","end":"7:00 PM","duration":14400}],"2021-1-24":[{"name":"COGS 120","type":"Fun","start":"4:00 AM","end":"12:00 AM","duration":72000}]} {"harry":{"2/24/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600},{"name":"Sleep","type":"Other","start":"6:00 PM","end":"10:00 PM","duration":14400},{"name":"Lunch","type":"Family","start":"9:00 PM","end":"10:00 PM","duration":3600}],"2/19/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":100000}],"2/5/2021":[{"name":"COGS 120","type":"Fun","start":"10:00 AM","end":"4:00 PM","duration":123456}]}} From bb921367d4ead7a546a391e9955c02128d401d90 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Wed, 24 Feb 2021 22:14:00 -0800 Subject: [PATCH 30/70] update account --- activities.json | 3 +-- users.json | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/activities.json b/activities.json index 176ea069c..9e26dfeeb 100644 --- a/activities.json +++ b/activities.json @@ -1,2 +1 @@ -{"2021-1-21":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:10 PM","duration":22200},{"name":"Lunch","type":"Family","start":"12:30 PM","end":"1:30 PM","duration":3600},{"name":"Game","type":"Fun","start":"8:00 PM","end":"12:00 AM","duration":14400},{"name":"COGS 120","type":"School","start":"4:05 AM","end":"11:00 AM","duration":24900},{"name":"COGS 120","type":"School","start":"1:00 AM","end":"11:00 AM","duration":36000}],"2021-1-23":[{"name":"COGS 120","type":"School","start":"8:00 AM","end":"9:00 AM","duration":3600},{"name":"COGS 102B","type":"School","start":"3:30 PM","end":"5:00 PM","duration":5400},{"name":"COGS 120","type":"School","start":"4:00 PM","end":"7:00 PM","duration":10800},{"name":"COGS 120","type":"School","start":"3:00 PM","end":"7:00 PM","duration":14400}],"2021-1-24":[{"name":"COGS 120","type":"Fun","start":"4:00 AM","end":"12:00 AM","duration":72000}]} -{"harry":{"2/24/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600},{"name":"Sleep","type":"Other","start":"6:00 PM","end":"10:00 PM","duration":14400},{"name":"Lunch","type":"Family","start":"9:00 PM","end":"10:00 PM","duration":3600}],"2/19/2021":[{"name":"COGS 120","type":"School","start":"10:00 
AM","end":"4:00 PM","duration":100000}],"2/5/2021":[{"name":"COGS 120","type":"Fun","start":"10:00 AM","end":"4:00 PM","duration":123456}]}} +{} \ No newline at end of file diff --git a/users.json b/users.json index 9a2d1b9e5..98fe56190 100644 --- a/users.json +++ b/users.json @@ -1 +1 @@ -{"123":{"password":"123","settings":{"startDay":"1"}},"harry":{"password":"123","settings":{"startDay":"1"}},"cogs120":{"password":"120123","settings":{"startDay":"1"}}} \ No newline at end of file +{"cogs120":{"password":"120123","settings":{"startDay":"1"}}} \ No newline at end of file From 162a290830396deef2b1ae1706be6e17703ef99c Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 25 Feb 2021 00:20:37 -0800 Subject: [PATCH 31/70] update --- activities.json | 2 +- users.json | 2 +- views/addConfirmation.handlebars | 1 + views/report.handlebars | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/activities.json b/activities.json index 0967ef424..9e26dfeeb 100644 --- a/activities.json +++ b/activities.json @@ -1 +1 @@ -{} +{} \ No newline at end of file diff --git a/users.json b/users.json index 98fe56190..9e26dfeeb 100644 --- a/users.json +++ b/users.json @@ -1 +1 @@ -{"cogs120":{"password":"120123","settings":{"startDay":"1"}}} \ No newline at end of file +{} \ No newline at end of file diff --git a/views/addConfirmation.handlebars b/views/addConfirmation.handlebars index 4e09c4ab4..775efd750 100644 --- a/views/addConfirmation.handlebars +++ b/views/addConfirmation.handlebars @@ -6,6 +6,7 @@ +
              diff --git a/views/report.handlebars b/views/report.handlebars index d0148ddb1..3f456c871 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -37,7 +37,7 @@ + {{!-- + --}}
              diff --git a/views/settings.handlebars b/views/settings.handlebars index f65ef6b09..7a9593ce0 100644 --- a/views/settings.handlebars +++ b/views/settings.handlebars @@ -23,6 +23,7 @@
            • Home
            • Add Activity
            • Report
            • +
            • Help
            • Log Out
            • diff --git a/views/suggestions.handlebars b/views/suggestions.handlebars index 9ec706b0d..9a191e17c 100644 --- a/views/suggestions.handlebars +++ b/views/suggestions.handlebars @@ -28,6 +28,7 @@
            • Home
            • Add Activity
            • Report
            • +
            • Help
            • Log Out
            • From badf534e2548100dec6d9883d23e8c9668e2abce Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 25 Feb 2021 02:08:02 -0800 Subject: [PATCH 33/70] Update activities.json --- activities.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/activities.json b/activities.json index 058ba3c5f..9e26dfeeb 100644 --- a/activities.json +++ b/activities.json @@ -1 +1 @@ -{"cogs120":{"2/25/2021":[{"name":"123","type":"Fun","start":"1:00 PM","end":"3:00 PM","duration":7200},{"name":"COGS 120","type":"Fun","start":"3:00 PM","end":"4:00 PM","duration":3600},{"name":"COGS 120","type":"Fun","start":"1:00 AM","end":"11:00 PM","duration":79200}]}} \ No newline at end of file +{} \ No newline at end of file From cf6c9720e050a9cad3446cb5d2d77da26630bbde Mon Sep 17 00:00:00 2001 From: Hackerry Date: Thu, 25 Feb 2021 11:55:24 -0800 Subject: [PATCH 34/70] Add details page --- activities.json | 6 +- app.js | 3 + public/css/add-activity.css | 2 +- public/css/details.css | 81 +++++++++++++++++++++++++ public/css/nav-bar.css | 2 +- public/css/suggestions.css | 8 ++- public/css/weekly-report.css | 2 +- public/js/activities.js | 50 +++++++++++++++- routes/details.js | 4 ++ routes/report.js | 2 +- routes/suggestions.js | 2 +- routes/userAction.js | 11 ++++ users.json | 2 +- views/add.handlebars | 4 +- views/details.handlebars | 113 +++++++++++++++++++++++++++++++++++ views/report.handlebars | 21 +++---- views/settings.handlebars | 1 + views/suggestions.handlebars | 7 ++- 18 files changed, 295 insertions(+), 26 deletions(-) create mode 100644 public/css/details.css create mode 100644 routes/details.js create mode 100644 views/details.handlebars diff --git a/activities.json b/activities.json index 9e26dfeeb..f1123a0f5 100644 --- a/activities.json +++ b/activities.json @@ -1 +1,5 @@ -{} \ No newline at end of file +{"harry":{ + "2/25/2021":[{"name":"Sleep","type":"Family","start":"1:00 AM","end":"8:00 AM","duration":25200}], + "2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}], + "7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}] +}} \ No newline at end of file diff --git a/app.js b/app.js index ec31b2e40..1381104fc 100644 --- a/app.js +++ b/app.js @@ -18,6 +18,7 @@ var userAction = require('./routes/userAction'); var settings = require('./routes/settings'); var suggestions = require('./routes/suggestions'); var help = require('./routes/help'); +var details = require('./routes/details'); var app = express(); @@ -52,8 +53,10 @@ app.get('/userSignUp', userAction.signUpAction); app.get('/userLogIn', userAction.logInAction); app.get('/settings', settings.view); app.get('/userRetrieveSettings', userAction.retrieveSettings); +app.get('/userGetActivities', userAction.getActivities); app.get('/userSetSettings', userAction.setSettings); app.get('/suggestions', suggestions.view); +app.get('/details', details.view); http.createServer(app).listen(app.get('port'), function(){ console.log('Express server listening on port ' + app.get('port')); diff --git a/public/css/add-activity.css b/public/css/add-activity.css index 4c4e08cce..582e9da03 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -30,7 +30,7 @@ a { } #back-bar, #hamburger-bar { - width: 48%; + width: 90%; display: inline-block; box-sizing: content-box; } diff --git a/public/css/details.css b/public/css/details.css new file mode 100644 index 000000000..bc5366b49 --- /dev/null +++ b/public/css/details.css @@ -0,0 +1,81 @@ +body 
{ + font-family: 'Roboto', sans-serif; +} + +* { + padding: 0; + margin: 0; +} + +a { + text-decoration: none; + color: black; +} + +#root-div { + width: 100%; + margin: 0 auto; +} + +#content-div { + width: 100%; + text-align: center; + margin: 0 auto; +} + +#details-title { + margin-top: 50px; +} + +#activities-list { + width: 80%; + margin: 20px auto; +} + +.activities-div { + width: 100%; + text-align: left; + margin: 10px auto; + border: 2px solid black; + padding: 0 10px; + border-radius: 10px; + box-sizing: border-box; +} + +.activities-date { + text-align: left; + padding: 10px 0; + border-bottom: 2px dashed; + margin: 15px 0; +} + +.activities-title { + display: inline-block; + width: 75%; + margin: 10px 0; +} + +.activities-time { + margin: 15px 0; + width: 75%; + display: inline-block; +} + +.activities-duration { + display: inline-block; +} + +#no-activities, #home-button { + width: 80%; + margin: 20px auto; +} + +#no-activities { + visibility: hidden; +} + +#home-button { + background-color: lightgray; + display: inline-block; + padding: 5px; +} \ No newline at end of file diff --git a/public/css/nav-bar.css b/public/css/nav-bar.css index 79fcc358d..c7083ec75 100644 --- a/public/css/nav-bar.css +++ b/public/css/nav-bar.css @@ -4,7 +4,7 @@ } #back-bar, #hamburger-bar { - width: 48%; + width: 90%; display: inline-block; box-sizing: content-box; } diff --git a/public/css/suggestions.css b/public/css/suggestions.css index a630e0973..7f2c7fbc5 100644 --- a/public/css/suggestions.css +++ b/public/css/suggestions.css @@ -29,7 +29,7 @@ a { } #back-bar, #hamburger-bar { - width: 48%; + width: 90%; display: inline-block; box-sizing: content-box; } @@ -100,4 +100,10 @@ a { text-align: left; width: 60%; margin: 0 auto; +} + +#back-button { + display: inline-block; + background-color: lightgray; + padding: 5px 10px; } \ No newline at end of file diff --git a/public/css/weekly-report.css b/public/css/weekly-report.css index 58cbd11b2..26e3781e4 100644 --- a/public/css/weekly-report.css +++ b/public/css/weekly-report.css @@ -29,7 +29,7 @@ a { } #back-bar, #hamburger-bar { - width: 48%; + width: 90%; display: inline-block; box-sizing: content-box; } diff --git a/public/js/activities.js b/public/js/activities.js index c7b84d5c8..99955087d 100644 --- a/public/js/activities.js +++ b/public/js/activities.js @@ -5,7 +5,7 @@ const fs = require("fs"); const ONE_DAY_TIME = 1000 * 60 * 60 * 24; const WEEK_TO_GEN = 3; -function getActivity(username) { +function getActivitySummary(username) { // Get user setting start date var allUserActivities = JSON.parse(fs.readFileSync(activitiesFile, 'utf8')); var activityTypes = JSON.parse(fs.readFileSync(activityTypesFile, 'utf8')); @@ -59,4 +59,50 @@ function getActivity(username) { return typeData; } -exports.getActivity = getActivity; \ No newline at end of file +function getActivites(username) { + // Get user setting start date + var allUserActivities = JSON.parse(fs.readFileSync(activitiesFile, 'utf8')); + + // Collect activities + var today = new Date(); + today = new Date((today.getMonth() + 1) + "/" + today.getDate() + "/" + today.getFullYear()); + + // Generate weekly report + var temp = {}; + var userActivities = allUserActivities[username]; + for (var date in userActivities) { + var userActivity = userActivities[date]; + + // Calculate how many day prior the activities are for + var lastDate = new Date(date); + var diffTime = today - lastDate; + var diffDays = Math.ceil(diffTime / ONE_DAY_TIME); + + // Store activities by type + var data = { + "date": 
date, + "activities": userActivity, + }; + temp[diffDays] = data; + } + + // Sort by most recent + var length = Object.keys(temp).length; + var sortedData = []; + for(var i = 0; sortedData.length < length; i++) { + if(temp.hasOwnProperty(i)) { + var data = { + "date": temp[i]['date'], + "activities": temp[i]['activities'], + }; + sortedData.push(data); + } + } + + console.log("Sorted data:", sortedData); + + return sortedData; +} + +exports.getActivitySummary = getActivitySummary; +exports.getActivites = getActivites; \ No newline at end of file diff --git a/routes/details.js b/routes/details.js new file mode 100644 index 000000000..e118b8168 --- /dev/null +++ b/routes/details.js @@ -0,0 +1,4 @@ + +exports.view = function(req, res) { + res.render("details"); +} \ No newline at end of file diff --git a/routes/report.js b/routes/report.js index e75f2d77c..eb19adc19 100644 --- a/routes/report.js +++ b/routes/report.js @@ -10,7 +10,7 @@ exports.view = function(req, res){ var data = {}; // Remove types that have no hours - data['data'] = activityUtil.getActivity(username); + data['data'] = activityUtil.getActivitySummary(username); console.log("Data gathered:", data['data']); diff --git a/routes/suggestions.js b/routes/suggestions.js index f6f14b2cd..5bae47f07 100644 --- a/routes/suggestions.js +++ b/routes/suggestions.js @@ -6,7 +6,7 @@ exports.view = function (req, res) { var data = {}; - var activityData = activityUtil.getActivity(username); + var activityData = activityUtil.getActivitySummary(username); var sum = 0; // Force average diff --git a/routes/userAction.js b/routes/userAction.js index 66fc505c5..fae6ca825 100644 --- a/routes/userAction.js +++ b/routes/userAction.js @@ -3,6 +3,8 @@ const userDatabaseFile = 'users.json'; const fs = require('fs'); +const activityUtil = require("../public/js/activities.js"); + exports.signUpAction = function(req, res){ var username = req.query.username; var password = req.query.password; @@ -54,6 +56,15 @@ exports.logInAction = function(req, res){ } }; +exports.getActivities = function(req, res) { + var username = req.query.username; + + res.json({ + "success": true, + "activities": activityUtil.getActivites(username) + }); +} + exports.retrieveSettings = function(req, res) { var username = req.query.username; diff --git a/users.json b/users.json index 98fe56190..abe6f3cda 100644 --- a/users.json +++ b/users.json @@ -1 +1 @@ -{"cogs120":{"password":"120123","settings":{"startDay":"1"}}} \ No newline at end of file +{"cogs120":{"password":"120123","settings":{"startDay":"1"}},"harry":{"password":"123","settings":{"startDay":"1"}}} \ No newline at end of file diff --git a/views/add.handlebars b/views/add.handlebars index f41e52080..d73e54a20 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -14,9 +14,6 @@
              -
              - -
              @@ -28,6 +25,7 @@
            • Home
            • Add Activity
            • Report
            • +
            • All Activities
            • Help
            • Log Out
            • diff --git a/views/details.handlebars b/views/details.handlebars new file mode 100644 index 000000000..5592bcd40 --- /dev/null +++ b/views/details.handlebars @@ -0,0 +1,113 @@ + + + + + + + + + + + + + +
              +
              +

              All Activities

              + +
              Home
              + +
              + +
              + +
              No past activities
              + + +
              +
              + + + + + + + \ No newline at end of file diff --git a/views/report.handlebars b/views/report.handlebars index 91b425ad6..0ed101d40 100644 --- a/views/report.handlebars +++ b/views/report.handlebars @@ -14,9 +14,6 @@
              -
              - -
              @@ -28,6 +25,7 @@
            • Home
            • Add Activity
            • Report
            • +
            • All Activities
            • Help
            • Log Out
            • @@ -59,16 +57,14 @@ --}}
              - - -
              - Details -
              -
              +
              Suggestions
              -
              + +
              + See All Activities +
              @@ -199,5 +195,10 @@ $('#suggestion-button').click(() => { window.location.href = '/suggestions?week=' + $('#week').val(); }); + + // Details button + $('#detail-button').click(() => { + window.location.href = '/details'; + }); \ No newline at end of file diff --git a/views/settings.handlebars b/views/settings.handlebars index 7a9593ce0..abea4eb1b 100644 --- a/views/settings.handlebars +++ b/views/settings.handlebars @@ -23,6 +23,7 @@
            • Home
            • Add Activity
            • Report
            • +
            • All Activities
            • Help
            • Log Out
            • diff --git a/views/suggestions.handlebars b/views/suggestions.handlebars index 9a191e17c..19170c8bd 100644 --- a/views/suggestions.handlebars +++ b/views/suggestions.handlebars @@ -14,9 +14,6 @@
              -
              - -
              @@ -28,6 +25,7 @@
            • Home
            • Add Activity
            • Report
            • +
            • All Activities
            • Help
            • Log Out
            • @@ -44,6 +42,9 @@ {{else}}

              No suggestions

              {{/if}} +
              + Back +
              From 82f33077ba03c8fe47b9d08646f70f89f1a241e9 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 25 Feb 2021 17:43:19 -0800 Subject: [PATCH 35/70] update --- activities.json | 6 +----- users.json | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/activities.json b/activities.json index f1123a0f5..d65e644fa 100644 --- a/activities.json +++ b/activities.json @@ -1,5 +1 @@ -{"harry":{ - "2/25/2021":[{"name":"Sleep","type":"Family","start":"1:00 AM","end":"8:00 AM","duration":25200}], - "2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}], - "7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}] -}} \ No newline at end of file +{"harry":{"2/25/2021":[{"name":"Sleep","type":"Family","start":"1:00 AM","end":"8:00 AM","duration":25200}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}]},"cogs121":{"2/25/2021":[{"name":"Grocery ","type":"Family","start":"8:00 PM","end":"10:00 PM","duration":7200}]}} \ No newline at end of file diff --git a/users.json b/users.json index abe6f3cda..ad92721f3 100644 --- a/users.json +++ b/users.json @@ -1 +1 @@ -{"cogs120":{"password":"120123","settings":{"startDay":"1"}},"harry":{"password":"123","settings":{"startDay":"1"}}} \ No newline at end of file +{"cogs120":{"password":"120123","settings":{"startDay":"1"}},"harry":{"password":"123","settings":{"startDay":"1"}},"cogs121":{"password":"121123","settings":{"startDay":"1"}}} \ No newline at end of file From cdfe05f1fe584fc3bf4692433f4d02345645654f Mon Sep 17 00:00:00 2001 From: Hackerry Date: Thu, 25 Feb 2021 18:09:53 -0800 Subject: [PATCH 36/70] Add nav bar to help and all activities page --- views/details.handlebars | 19 +++++++++++++++++++ views/help.handlebars | 33 +++++++++++++++++++++++++-------- 2 files changed, 44 insertions(+), 8 deletions(-) diff --git a/views/details.handlebars b/views/details.handlebars index 5592bcd40..4a9c44fec 100644 --- a/views/details.handlebars +++ b/views/details.handlebars @@ -13,6 +13,24 @@
              +
              +
              + +
              +
              + + +

              All Activities

              Home
              @@ -38,6 +56,7 @@
              + + @@ -11,13 +12,29 @@
              -
              -

              Help

              -

              Since our app is new, we would like to offer some guidance so you can get a better idea of how to use the app.

              -

              1. An activity cannot exceed 24 hours per day

              -

              2. The total amount of hours from the collection of activities cannot exceed 24 hours per day

              -

              3. You can go to different pages by clicking/tapping the menu button on the top right and choosing where to go

              -
              +
              + +
              +
              + + + +
              +

              Help

              +

              Since our app is new, we would like to offer some guidance so you can get a better idea of how to use the app.

              +

              1. An activity cannot exceed 24 hours per day

              +

              2. The total amount of hours from the collection of activities cannot exceed 24 hours per day

              +

              3. You can go to different pages by clicking/tapping the menu button on the top right and choosing where to go

              + From 6f05c3c67e067206c39478aba7ab00825f572ec9 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 25 Feb 2021 18:12:29 -0800 Subject: [PATCH 37/70] Update home.handlebars --- views/home.handlebars | 3 +++ 1 file changed, 3 insertions(+) diff --git a/views/home.handlebars b/views/home.handlebars index b6811a976..6c40b677a 100644 --- a/views/home.handlebars +++ b/views/home.handlebars @@ -35,6 +35,9 @@
              Report
              + +
              All Activities
              +
              Help
              From 6135b0c11c86a936e36f42418bc2732c905ee5c9 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 25 Feb 2021 18:27:26 -0800 Subject: [PATCH 38/70] update "Help" page --- public/css/styles.css | 12 ++++++++++++ views/help.handlebars | 10 ++++++---- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/public/css/styles.css b/public/css/styles.css index 7808ac742..fef79e259 100644 --- a/public/css/styles.css +++ b/public/css/styles.css @@ -99,3 +99,15 @@ a { margin: 10px 0; } +#help-content { + text-align: left; + margin-left: 10%; + margin-right: 10%; +} + +@media only screen and (min-width: 600px) { + #help-content { + margin-left: 20%; + margin-right: 20%; + } +} \ No newline at end of file diff --git a/views/help.handlebars b/views/help.handlebars index 9c1a94e8d..9de1a8b74 100644 --- a/views/help.handlebars +++ b/views/help.handlebars @@ -31,10 +31,12 @@

              Help

              -

              Since our app is new, we would like to offer some guidance so you can get a better idea of how to use the app.

              -

              1. An activity cannot exceed 24 hours per day

              -

              2. The total amount of hours from the collection of activities cannot exceed 24 hours per day

              -

              3. You can go to different pages by clicking/tapping the menu button on the top right and choosing where to go

              +
              +

              Since our app is new, we would like to offer some guidance so you can get a better idea of how to use the app.

              +

              1. An activity cannot exceed 24 hours per day.

              +

              2. The total amount of hours from the collection of activities cannot exceed 24 hours per day.

              +

              3. You can go to different pages by clicking/tapping the menu button on the top right and choosing where to go.

              +
              Log In
              From cda1c1d3d96cccf4ee49e13df1784f2d874e1fc4 Mon Sep 17 00:00:00 2001 From: Hackerry Date: Sun, 28 Feb 2021 12:28:56 -0800 Subject: [PATCH 40/70] Add more activities options --- activities.json | 2 +- app.js | 1 + public/css/details.css | 54 ++++++++++++++ public/js/activities.js | 24 +++++- routes/suggestions.js | 3 +- routes/userAction.js | 13 +++- views/details.handlebars | 157 +++++++++++++++++++++++++++++++++++---- views/report.handlebars | 2 +- 8 files changed, 238 insertions(+), 18 deletions(-) diff --git a/activities.json b/activities.json index d65e644fa..f272a7a62 100644 --- a/activities.json +++ b/activities.json @@ -1 +1 @@ -{"harry":{"2/25/2021":[{"name":"Sleep","type":"Family","start":"1:00 AM","end":"8:00 AM","duration":25200}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}]},"cogs121":{"2/25/2021":[{"name":"Grocery ","type":"Family","start":"8:00 PM","end":"10:00 PM","duration":7200}]}} \ No newline at end of file +{"harry":{"2/25/2021":[{"name":"COGS 120","type":"Fun","start":"1:00 AM","end":"7:00 AM","duration":21600}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}]},"cogs121":{"2/25/2021":[{"name":"Grocery ","type":"Family","start":"8:00 PM","end":"10:00 PM","duration":7200}]}} \ No newline at end of file diff --git a/app.js b/app.js index 1381104fc..b282dc95a 100644 --- a/app.js +++ b/app.js @@ -54,6 +54,7 @@ app.get('/userLogIn', userAction.logInAction); app.get('/settings', settings.view); app.get('/userRetrieveSettings', userAction.retrieveSettings); app.get('/userGetActivities', userAction.getActivities); +app.get('/userDeleteActivity', userAction.deleteActivity); app.get('/userSetSettings', userAction.setSettings); app.get('/suggestions', suggestions.view); app.get('/details', details.view); diff --git a/public/css/details.css b/public/css/details.css index bc5366b49..da1adea03 100644 --- a/public/css/details.css +++ b/public/css/details.css @@ -40,6 +40,7 @@ a { padding: 0 10px; border-radius: 10px; box-sizing: border-box; + position: relative; } .activities-date { @@ -65,6 +66,14 @@ a { display: inline-block; } +.activity-delete-button { + position: absolute; + display: inline-block; + top: 3px; + right: 6px; + font-size: 1.2em; +} + #no-activities, #home-button { width: 80%; margin: 20px auto; @@ -78,4 +87,49 @@ a { background-color: lightgray; display: inline-block; padding: 5px; + box-sizing: border-box; +} + +#control-buttons { + width: 80%; + margin: 0 auto; +} + +#time-frame { + width: 100%; + box-sizing: content-box; + padding: 3px 0; +} + +#delete-confirmation-top { + height: 100%; + z-index: 2; + position: absolute; + top: 0; + left: 0; + width: 100%; + background-color: rgba(0.5, 0.5, 0.5, 0.4); + visibility: hidden; +} + +#delete-confirmation-div { + position: absolute; + top: 40%; + left: 10%; + width: 70%; + padding: 5%; + text-align: center; + background-color: white; + border-radius: 5px; + border: 2px solid black; +} + +#delete-confirmation-submit, #delete-confirmation-cancel { + width: 40%; + display: inline-block; + margin: 50px 5px 0 5px; + font-weight: bold; + border: 2px solid black; + border-radius: 5px; + padding: 5px; } \ No newline at end of file diff --git a/public/js/activities.js b/public/js/activities.js index 
99955087d..18521e469 100644 --- a/public/js/activities.js +++ b/public/js/activities.js @@ -104,5 +104,27 @@ function getActivites(username) { return sortedData; } +function deleteActivity(username, date, index) { + // Get user setting start date + var allUserActivities = JSON.parse(fs.readFileSync(activitiesFile, 'utf8')); + + // Find date and delete based on entry + var activities = allUserActivities[username]; + if(activities.hasOwnProperty(date)) { + activities[date].splice(index, 1); + + // If empty, delete entry + if(activities[date].length == 0) { + delete activities[date]; + } + + var data = JSON.stringify(allUserActivities); + fs.writeFileSync(activitiesFile, data, 'utf8'); + } + + return getActivites(username); +} + exports.getActivitySummary = getActivitySummary; -exports.getActivites = getActivites; \ No newline at end of file +exports.getActivites = getActivites; +exports.deleteActivity = deleteActivity; \ No newline at end of file diff --git a/routes/suggestions.js b/routes/suggestions.js index 5bae47f07..2d20fce08 100644 --- a/routes/suggestions.js +++ b/routes/suggestions.js @@ -11,7 +11,7 @@ exports.view = function (req, res) { var sum = 0; // Force average for (var i in activityData) { - activityData[i].count[week] /= (7*3600); + activityData[i].count[week] /= 3600; sum += activityData[i].count[week]; } if (sum == 0) { @@ -43,6 +43,7 @@ exports.view = function (req, res) { suggestions.push("need less work and / or school time"); } if (funHour < 4) { + console.log('Fun Hour:', funHour); suggestions.push("need more fun time"); } else { diff --git a/routes/userAction.js b/routes/userAction.js index fae6ca825..f47edad81 100644 --- a/routes/userAction.js +++ b/routes/userAction.js @@ -61,7 +61,18 @@ exports.getActivities = function(req, res) { res.json({ "success": true, - "activities": activityUtil.getActivites(username) + "activities": activityUtil.getActivites(username), + }); +} + +exports.deleteActivity = function(req, res) { + var username = req.query.username; + var date = req.query.date; + var index = req.query.index; + + res.json({ + "success": true, + "activities": activityUtil.deleteActivity(username, date, index), }); } diff --git a/views/details.handlebars b/views/details.handlebars index 4a9c44fec..42e6eb341 100644 --- a/views/details.handlebars +++ b/views/details.handlebars @@ -35,12 +35,30 @@
              Home
              +
              + +
              +
              No past activities
              +
              +
              + Are you sure you want to delete the activity?
              +
              Delete
              +
              Cancel
              +
              +
              +
              @@ -57,8 +59,6 @@
              -
              Home
              - From d26e2192a7115c130553177374c86f49fcb30e4e Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 4 Mar 2021 22:21:47 -0800 Subject: [PATCH 68/70] update styling --- activities.json | 2 +- public/css/add-activity.css | 9 +++++++++ public/css/details.css | 6 ++++-- public/css/settings.css | 5 +++++ public/css/styles.css | 3 ++- public/css/weekly-report.css | 7 ++++++- routes/suggestions.js | 6 +++--- views/add.handlebars | 6 ++++-- views/details.handlebars | 2 +- views/help.handlebars | 2 +- views/report.handlebars | 2 +- views/settings.handlebars | 6 +++--- 12 files changed, 40 insertions(+), 16 deletions(-) diff --git a/activities.json b/activities.json index 075226c98..58415274e 100644 --- a/activities.json +++ b/activities.json @@ -1 +1 @@ -{"harry":{"2/25/2021":[{"name":"COGS 120","type":"School","start":"3:00 AM","end":"10:00 PM","duration":68400}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}],"3/1/2021":[{"name":"Sleep In Class","type":"Family","start":"3:00 PM","end":"6:00 PM","duration":10800},{"name":"Lunch with mom","type":"Family","start":"11:00 AM","end":"12:00 PM","duration":3600}],"3/3/2021":[{"name":"Sleep123123","type":"Other","start":"7:00 AM","end":"11:00 AM","duration":14400},{"name":"COGS 120","type":"Family","start":"1:00 AM","end":"2:45 AM","duration":6300},{"name":"Homework","type":"School","start":"7:00 AM","end":"8:45 AM","duration":6300}]},"cogs121":{"2/28/2021":[{"name":"COGS 120","type":"School","start":"3:00 PM","end":"4:00 PM","duration":3600},{"name":"Music","type":"Fun","start":"2:00 PM","end":"3:00 PM","duration":3600},{"name":"dinner","type":"Family","start":"8:00 PM","end":"9:00 PM","duration":3600},{"name":"123","type":"Other","start":"1:00 AM","end":"2:00 AM","duration":3600},{"name":"Project 1","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600}],"3/1/2021":[{"name":"COGS 120","type":"Family","start":"2:00 PM","end":"4:00 PM","duration":7200},{"name":"fun","type":"Fun","start":"2:00 PM","end":"11:00 PM","duration":32400}]},"anwai":{"3/2/2021":[{"name":"Cooking","type":"Family","start":"6:00 AM","end":"7:00 AM","duration":3600},{"name":"Exercise","type":"Other","start":"7:05 AM","end":"8:00 AM","duration":3300},{"name":"Class","type":"School","start":"9:00 AM","end":"1:00 PM","duration":14400}]}} \ No newline at end of file +{"harry":{"2/25/2021":[{"name":"COGS 120","type":"School","start":"3:00 AM","end":"10:00 PM","duration":68400}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}],"3/1/2021":[{"name":"Sleep In Class","type":"Family","start":"3:00 PM","end":"6:00 PM","duration":10800},{"name":"Lunch with mom","type":"Family","start":"11:00 AM","end":"12:00 PM","duration":3600}],"3/3/2021":[{"name":"Sleep123123","type":"Other","start":"7:00 AM","end":"11:00 AM","duration":14400},{"name":"COGS 120","type":"Family","start":"1:00 AM","end":"2:45 AM","duration":6300},{"name":"Homework","type":"School","start":"7:00 AM","end":"8:45 AM","duration":6300}]},"cogs121":{"2/28/2021":[{"name":"COGS 120","type":"School","start":"3:00 PM","end":"4:00 PM","duration":3600},{"name":"dinner","type":"Family","start":"8:00 PM","end":"9:00 PM","duration":3600},{"name":"123","type":"Other","start":"1:00 AM","end":"2:00 
AM","duration":3600},{"name":"Project 1","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600}],"3/1/2021":[{"name":"COGS 120","type":"Family","start":"2:00 PM","end":"4:00 PM","duration":7200}],"3/4/2021":[{"name":"project CSE 110","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600}]},"anwai":{"3/2/2021":[{"name":"Cooking","type":"Family","start":"6:00 AM","end":"7:00 AM","duration":3600},{"name":"Exercise","type":"Other","start":"7:05 AM","end":"8:00 AM","duration":3300},{"name":"Class","type":"School","start":"9:00 AM","end":"1:00 PM","duration":14400}]}} \ No newline at end of file diff --git a/public/css/add-activity.css b/public/css/add-activity.css index 53a26d614..95b7cfee0 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -98,6 +98,15 @@ a { margin: 0 auto; } +#adding-box { + background: linear-gradient(147.47deg, #FFFFFF -44.49%, #ffffff59 49.09%); + backdrop-filter: blur(4px); + /* Note: backdrop-filter has minimal browser support */ + border-radius: 10px; + margin-top: 20px; + padding-top: 2px; +} + .form-entry { width: 80%; margin: 20px auto; diff --git a/public/css/details.css b/public/css/details.css index 9890af9c1..47a5a1876 100644 --- a/public/css/details.css +++ b/public/css/details.css @@ -25,12 +25,13 @@ a { } #details-title { - margin-top: 50px; + margin-top: 5px; + margin-bottom: 30px; } #activities-list { width: 80%; - margin: 20px auto; + margin: 0px auto; } .activities-div { @@ -149,6 +150,7 @@ a { background-color: #009BCF; border-radius: 50px; margin: 10px 0; + margin-top: 15px; color: white; width: 60%; } diff --git a/public/css/settings.css b/public/css/settings.css index 869ea322e..f75ff58bc 100644 --- a/public/css/settings.css +++ b/public/css/settings.css @@ -122,4 +122,9 @@ a { box-sizing: border-box; border-radius: 50px; color: white; + margin-top: 10px; +} + +label { + font-size: 15px; } \ No newline at end of file diff --git a/public/css/styles.css b/public/css/styles.css index a1670a8e1..52ed6c7d2 100644 --- a/public/css/styles.css +++ b/public/css/styles.css @@ -106,12 +106,13 @@ a { text-align: left; margin-left: 10%; margin-right: 10%; + margin-top: 10px; + margin-bottom: 20px; } .question { background-color: white; opacity: 70%; - /* color: white; */ margin: 2px 0; padding: 5px; position: relative; diff --git a/public/css/weekly-report.css b/public/css/weekly-report.css index 2636fc956..01a30718d 100644 --- a/public/css/weekly-report.css +++ b/public/css/weekly-report.css @@ -90,11 +90,16 @@ a { } #week, #scale { - background-color: lightgray; + /* background-color: lightgray; */ box-sizing: content-box; border: none; padding: 5px; text-align: center; + background-color: white; + opacity: 70%; + margin: 2px 0; + /* padding: 5px; */ + /* position: relative; */ } #chart { diff --git a/routes/suggestions.js b/routes/suggestions.js index b2b0f932b..5048c4753 100644 --- a/routes/suggestions.js +++ b/routes/suggestions.js @@ -37,14 +37,14 @@ exports.view = function (req, res) { } // console.log(workHour, funHour, familyHour, schoolHour, otherHour, sum); if (workHour + schoolHour > 12) { - suggestions.push("need less work and / or school time"); + suggestions.push("Please get less work and/or school time, you deserve a break and get some fun. :)"); } else if (funHour < 4) { console.log('Fun Hour:', funHour); - suggestions.push("need more fun time"); + suggestions.push("Please have more fun time, you deserve it. :)"); } else { - suggestions.push("good :) !!!"); + suggestions.push("Awesome! 
Everything is good so far. :)"); } // Remove types that have no hours diff --git a/views/add.handlebars b/views/add.handlebars index 1b16cea7d..962ca4c60 100644 --- a/views/add.handlebars +++ b/views/add.handlebars @@ -32,8 +32,9 @@
[views/add.handlebars hunks: the HTML was stripped during extraction. The surviving text shows the "Add Activity" heading re-rendered inside a new wrapper (styled by the #adding-box rule added above) and a new line reading "+1 day" added to the form.]
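For reference, the reworded messages in the routes/suggestions.js hunk above sit behind a small threshold check; the sketch below restates that logic in isolation. The function name buildSuggestions is illustrative, and the per-type totals (workHour, schoolHour, funHour) are assumed to be daily sums in hours computed earlier in the route, since that surrounding code is not shown in this patch.

// Sketch of the thresholds behind the reworded suggestion strings; the hour
// totals are assumed to be daily sums (in hours) computed earlier in the route.
function buildSuggestions(workHour, schoolHour, funHour) {
  var suggestions = [];
  if (workHour + schoolHour > 12) {
    suggestions.push("Please get less work and/or school time, you deserve a break and get some fun. :)");
  } else if (funHour < 4) {
    suggestions.push("Please have more fun time, you deserve it. :)");
  } else {
    suggestions.push("Awesome! Everything is good so far. :)");
  }
  return suggestions;
}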
              diff --git a/views/details.handlebars b/views/details.handlebars index 69498c620..beb8f26a2 100644 --- a/views/details.handlebars +++ b/views/details.handlebars @@ -34,7 +34,7 @@
[views/details.handlebars hunk: HTML stripped during extraction; only the "All Activities" heading text survives on both the removed and the added line.]
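For orientation, the activities.json blobs replaced at the top of these patches (and listed by pages like this one) all share one shape: username, then date string, then an array of { name, type, start, end, duration } objects, with duration stored in seconds. A minimal sketch of how such a duration can be derived from the start/end strings; the helper names are illustrative and not taken from the repo.

// Illustrative only: convert an "h:mm AM/PM" string to minutes since midnight.
function toMinutes(t) {
  var parts = t.match(/(\d+):(\d+) (AM|PM)/);
  var hours = Number(parts[1]) % 12;
  if (parts[3] === 'PM') hours += 12;
  return hours * 60 + Number(parts[2]);
}

// Duration in seconds, matching the stored values:
// "3:30 PM" to "4:50 PM" gives 80 minutes = 4800 seconds, as in the COGS 102B entry.
function durationSeconds(start, end) {
  return (toMinutes(end) - toMinutes(start)) * 60;
}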
              diff --git a/views/settings.handlebars b/views/settings.handlebars index ec94267a2..77cfcd3bf 100644 --- a/views/settings.handlebars +++ b/views/settings.handlebars @@ -31,9 +31,9 @@
[views/settings.handlebars hunk: HTML stripped during extraction; only the "Settings" heading text survives on both the removed and the added line.]
              - + From 4a17c2dbbe18149e7201195447cf9eab0a25af98 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 4 Mar 2021 22:56:54 -0800 Subject: [PATCH 69/70] update styling --- activities.json | 2 +- public/css/add-activity.css | 2 +- public/css/details.css | 2 +- public/css/settings.css | 2 +- public/css/suggestions.css | 2 +- public/css/weekly-report.css | 2 +- users.json | 2 +- views/help.handlebars | 10 +++++++--- views/settings.handlebars | 4 ++-- 9 files changed, 16 insertions(+), 12 deletions(-) diff --git a/activities.json b/activities.json index 58415274e..cc2931d5e 100644 --- a/activities.json +++ b/activities.json @@ -1 +1 @@ -{"harry":{"2/25/2021":[{"name":"COGS 120","type":"School","start":"3:00 AM","end":"10:00 PM","duration":68400}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}],"3/1/2021":[{"name":"Sleep In Class","type":"Family","start":"3:00 PM","end":"6:00 PM","duration":10800},{"name":"Lunch with mom","type":"Family","start":"11:00 AM","end":"12:00 PM","duration":3600}],"3/3/2021":[{"name":"Sleep123123","type":"Other","start":"7:00 AM","end":"11:00 AM","duration":14400},{"name":"COGS 120","type":"Family","start":"1:00 AM","end":"2:45 AM","duration":6300},{"name":"Homework","type":"School","start":"7:00 AM","end":"8:45 AM","duration":6300}]},"cogs121":{"2/28/2021":[{"name":"COGS 120","type":"School","start":"3:00 PM","end":"4:00 PM","duration":3600},{"name":"dinner","type":"Family","start":"8:00 PM","end":"9:00 PM","duration":3600},{"name":"123","type":"Other","start":"1:00 AM","end":"2:00 AM","duration":3600},{"name":"Project 1","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600}],"3/1/2021":[{"name":"COGS 120","type":"Family","start":"2:00 PM","end":"4:00 PM","duration":7200}],"3/4/2021":[{"name":"project CSE 110","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600}]},"anwai":{"3/2/2021":[{"name":"Cooking","type":"Family","start":"6:00 AM","end":"7:00 AM","duration":3600},{"name":"Exercise","type":"Other","start":"7:05 AM","end":"8:00 AM","duration":3300},{"name":"Class","type":"School","start":"9:00 AM","end":"1:00 PM","duration":14400}]}} \ No newline at end of file +{"harry":{"2/25/2021":[{"name":"COGS 120","type":"School","start":"3:00 AM","end":"10:00 PM","duration":68400}],"2/10/2021":[{"name":"COGS 120","type":"School","start":"10:00 AM","end":"4:00 PM","duration":21600}],"7/10/2020":[{"name":"Work","type":"Work","start":"1:00 AM","end":"8:00 AM","duration":25200}],"3/1/2021":[{"name":"Sleep In Class","type":"Family","start":"3:00 PM","end":"6:00 PM","duration":10800},{"name":"Lunch with mom","type":"Family","start":"11:00 AM","end":"12:00 PM","duration":3600}],"3/3/2021":[{"name":"Sleep123123","type":"Other","start":"7:00 AM","end":"11:00 AM","duration":14400},{"name":"COGS 120","type":"Family","start":"1:00 AM","end":"2:45 AM","duration":6300},{"name":"Homework","type":"School","start":"7:00 AM","end":"8:45 AM","duration":6300}]},"cogs121":{"2/28/2021":[{"name":"COGS 120","type":"School","start":"3:00 PM","end":"4:00 PM","duration":3600},{"name":"dinner","type":"Family","start":"8:00 PM","end":"9:00 PM","duration":3600},{"name":"123","type":"Other","start":"1:00 AM","end":"2:00 AM","duration":3600},{"name":"Project 1","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600}],"3/1/2021":[{"name":"COGS 120","type":"Family","start":"2:00 PM","end":"4:00 
PM","duration":7200}],"3/4/2021":[{"name":"project CSE 110","type":"Work","start":"10:00 PM","end":"11:00 PM","duration":3600},{"name":"COGS 102B","type":"School","start":"3:30 PM","end":"4:50 PM","duration":4800}]},"anwai":{"3/2/2021":[{"name":"Cooking","type":"Family","start":"6:00 AM","end":"7:00 AM","duration":3600},{"name":"Exercise","type":"Other","start":"7:05 AM","end":"8:00 AM","duration":3300},{"name":"Class","type":"School","start":"9:00 AM","end":"1:00 PM","duration":14400}]}} \ No newline at end of file diff --git a/public/css/add-activity.css b/public/css/add-activity.css index 95b7cfee0..ea5d72bb2 100644 --- a/public/css/add-activity.css +++ b/public/css/add-activity.css @@ -2,7 +2,7 @@ body { font-family: 'Roboto', sans-serif; width: 100%; height: 100%; - background: linear-gradient(to right bottom, rgba(0, 182, 244, 100%), rgba(0, 182, 244, 40%), rgba(253, 220, 205, 40%), rgba(253, 220, 205, 100%)); + background: linear-gradient(169.02deg, #00B6F4 0.56%, #FDDCCD 62.16%); } * { diff --git a/public/css/details.css b/public/css/details.css index 47a5a1876..4211cffe1 100644 --- a/public/css/details.css +++ b/public/css/details.css @@ -1,6 +1,6 @@ body { font-family: 'Roboto', sans-serif; - background: linear-gradient(to right bottom, rgba(0, 182, 244, 100%), rgba(0, 182, 244, 40%), rgba(253, 220, 205, 40%), rgba(253, 220, 205, 100%)); + background: linear-gradient(169.02deg, #00B6F4 0.56%, #FDDCCD 62.16%); } * { diff --git a/public/css/settings.css b/public/css/settings.css index f75ff58bc..287faf722 100644 --- a/public/css/settings.css +++ b/public/css/settings.css @@ -5,7 +5,7 @@ body { font-family: 'Roboto', sans-serif; - background: linear-gradient(to right bottom, rgba(0, 182, 244, 100%), rgba(0, 182, 244, 40%), rgba(253, 220, 205, 40%), rgba(253, 220, 205, 100%)); + background: linear-gradient(169.02deg, #00B6F4 0.56%, #FDDCCD 62.16%); } a { diff --git a/public/css/suggestions.css b/public/css/suggestions.css index 611754122..977317cd9 100644 --- a/public/css/suggestions.css +++ b/public/css/suggestions.css @@ -1,6 +1,6 @@ body { font-family: 'Roboto', sans-serif; - background: linear-gradient(to right bottom, rgba(0, 182, 244, 100%), rgba(0, 182, 244, 40%), rgba(253, 220, 205, 40%), rgba(253, 220, 205, 100%)); + background: linear-gradient(169.02deg, #00B6F4 0.56%, #FDDCCD 62.16%); } * { diff --git a/public/css/weekly-report.css b/public/css/weekly-report.css index 01a30718d..8ca8e2385 100644 --- a/public/css/weekly-report.css +++ b/public/css/weekly-report.css @@ -5,7 +5,7 @@ body { font-family: 'Roboto', sans-serif; - background: linear-gradient(to right bottom, rgba(0, 182, 244, 100%), rgba(0, 182, 244, 40%), rgba(253, 220, 205, 40%), rgba(253, 220, 205, 100%)); + background: linear-gradient(169.02deg, #00B6F4 0.56%, #FDDCCD 62.16%); } a { diff --git a/users.json b/users.json index cf580936b..c4b9412c5 100644 --- a/users.json +++ b/users.json @@ -1 +1 @@ -{"cogs120":{"password":"120123","settings":{"startDay":"0","durationFmt":"min"}},"harry":{"password":"123","settings":{"startDay":"1","durationFmt":"hrsmin"}},"cogs121":{"password":"121123","settings":{"startDay":"0","durationFmt":"hrs"}},"anwai":{"password":"123","settings":{"startDay":"0","durationFmt":"min"}}} \ No newline at end of file 
+{"cogs120":{"password":"120123","settings":{"startDay":"0","durationFmt":"min"}},"harry":{"password":"123","settings":{"startDay":"1","durationFmt":"hrsmin"}},"cogs121":{"password":"121123","settings":{"startDay":"1","durationFmt":"hrs"}},"anwai":{"password":"123","settings":{"startDay":"0","durationFmt":"min"}},"cogs122":{"password":"122123","settings":{"startDay":"0","durationFmt":"min"}}} \ No newline at end of file diff --git a/views/help.handlebars b/views/help.handlebars index 6712ad395..a1f3f1bc7 100644 --- a/views/help.handlebars +++ b/views/help.handlebars @@ -35,14 +35,18 @@

              Help

Since our app is new, we would like to offer some guidance so you can get a better idea of how to use it.

              -

              1. When does a report begin?

              -

              For every week, a report begins on Sunday.

              +

1. What day does each weekly report begin on?

              +

By default, each weekly report begins on Sunday.

              2. Can an activity exceed 24 hours a day?

              No. An activity cannot exceed 24 hours per day.

              -

              3. Where can I find all the options?

              +

              3. Where can I find all the access options?

You can move between pages by clicking/tapping the menu button at the top right, which shows all the places you can go.

              4. Can I edit the already added activities?

You can edit existing activities on the "All Activities" page: click/tap the activity you would like to change and the editing options appear right away.

              +

              5. Can I change the activity time display format?

              +

You can change it on the Settings page, which lets you show durations as either "Hours" or "Hours and Minutes".

              +

              6. Can I change the weekly starting day for Weekly Report page?

              +

You can change it to any day on the Settings page.
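The two new questions above map onto the per-user settings stored in users.json ("durationFmt": "min" / "hrs" / "hrsmin", and "startDay"). A hedged sketch of how those values could drive the behaviour the answers describe; the function names and exact display strings are illustrative assumptions, only the setting values come from users.json.

// Assumed mapping of durationFmt values to display strings ("hrsmin" is taken
// to mean the "Hours and Minutes" option mentioned in question 5).
function formatDuration(seconds, durationFmt) {
  var minutes = seconds / 60;
  if (durationFmt === 'min') return minutes + ' min';                    // 4800 -> "80 min"
  if (durationFmt === 'hrs') return (minutes / 60).toFixed(1) + ' hrs';  // 4800 -> "1.3 hrs"
  return Math.floor(minutes / 60) + ' hrs ' + (minutes % 60) + ' min';   // 4800 -> "1 hrs 20 min"
}

// Assumed meaning of startDay: "0" = Sunday ... "6" = Saturday.
// Returns the date on which the current weekly report would start.
function weekStart(date, startDay) {
  var diff = (date.getDay() - Number(startDay) + 7) % 7;
  var start = new Date(date);
  start.setDate(date.getDate() - diff);
  return start;
}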

              diff --git a/views/settings.handlebars b/views/settings.handlebars index 77cfcd3bf..bf6875165 100644 --- a/views/settings.handlebars +++ b/views/settings.handlebars @@ -48,8 +48,8 @@
              From 3dea5bf2a69fbfe361d992c7ad83cf19502a85b0 Mon Sep 17 00:00:00 2001 From: Tam Tran Date: Thu, 4 Mar 2021 23:08:36 -0800 Subject: [PATCH 70/70] update styling --- public/css/details.css | 2 +- public/css/styles.css | 1 - public/css/suggestions.css | 1 + views/details.handlebars | 2 +- views/help.handlebars | 4 ++-- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/public/css/details.css b/public/css/details.css index 4211cffe1..741ab93c0 100644 --- a/public/css/details.css +++ b/public/css/details.css @@ -26,7 +26,7 @@ a { #details-title { margin-top: 5px; - margin-bottom: 30px; + margin-bottom: 22px; } #activities-list { diff --git a/public/css/styles.css b/public/css/styles.css index 52ed6c7d2..4c5e94511 100644 --- a/public/css/styles.css +++ b/public/css/styles.css @@ -107,7 +107,6 @@ a { margin-left: 10%; margin-right: 10%; margin-top: 10px; - margin-bottom: 20px; } .question { diff --git a/public/css/suggestions.css b/public/css/suggestions.css index 977317cd9..634e6df17 100644 --- a/public/css/suggestions.css +++ b/public/css/suggestions.css @@ -110,5 +110,6 @@ a { border-radius: 50px; width: 60%; margin: 10px 0; + margin-top: 25px; color: white; } \ No newline at end of file diff --git a/views/details.handlebars b/views/details.handlebars index beb8f26a2..1c2db2a48 100644 --- a/views/details.handlebars +++ b/views/details.handlebars @@ -46,7 +46,7 @@
[views/details.handlebars hunk: HTML stripped during extraction; the surviving fragments show the "Home" link being wrapped in a Handlebars {{!-- --}} comment.]
              diff --git a/views/help.handlebars b/views/help.handlebars index a1f3f1bc7..944077463 100644 --- a/views/help.handlebars +++ b/views/help.handlebars @@ -49,11 +49,11 @@

You can change it to any day on the Settings page.

[views/help.handlebars hunk: HTML stripped during extraction; the surviving fragments show an element below the last answer being wrapped in a Handlebars {{!-- --}} comment.]