From 026bfdb76d02796a7c9cb232b0742ac85e6e1d89 Mon Sep 17 00:00:00 2001
From: Jacob Bandes-Storch
Date: Fri, 19 Nov 2021 11:42:56 -0800
Subject: [PATCH 001/635] Update typescript packaging/dependencies, change
 license to MIT, add CI script, add README (#1)
---
 .github/workflows/ci.yml            |   21 +
 .gitignore                          |    1 +
 .vscode/extensions.json             |    7 +
 .vscode/settings.json               |    9 +
 LICENSE.md                          |   21 +
 README.md                           |   20 +
 package.json                        |    8 +
 typescript/.eslintrc.js             |   19 +
 typescript/.prettierrc.yml          |    2 +
 typescript/jest.config.json         |    4 +-
 typescript/package.json             |   43 +-
 typescript/scripts/validate.ts      |   18 +-
 typescript/src/McapReader.test.ts   |   22 +-
 typescript/src/McapReader.ts        |    4 -
 typescript/src/StreamBuffer.test.ts |    4 -
 typescript/src/StreamBuffer.ts      |    4 -
 typescript/src/constants.ts         |    4 -
 typescript/src/index.ts             |    4 -
 typescript/src/parse.ts             |   16 +-
 typescript/src/types.ts             |    4 -
 typescript/typings/wasm-lz4.d.ts    |    8 +
 yarn.lock                           | 3761 +++++++++++++++++++++++++++
 22 files changed, 3944 insertions(+), 60 deletions(-)
 create mode 100644 .github/workflows/ci.yml
 create mode 100644 .gitignore
 create mode 100644 .vscode/extensions.json
 create mode 100644 .vscode/settings.json
 create mode 100644 LICENSE.md
 create mode 100644 README.md
 create mode 100644 package.json
 create mode 100644 typescript/.eslintrc.js
 create mode 100644 typescript/.prettierrc.yml
 create mode 100644 typescript/typings/wasm-lz4.d.ts
 create mode 100644 yarn.lock

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000000..8b7be03bc5
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,21 @@
+name: CI
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    branches: ["*"]
+
+jobs:
+  typescript:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
+        with:
+          node-version: 16.x
+          cache: yarn
+
+      - run: yarn install --frozen-lockfile
+      - run: yarn workspace @foxglove/mcap lint:ci
+      - run: yarn workspace @foxglove/mcap test
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000..3c3629e647
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+node_modules
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000000..ea892e04c1
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,7 @@
+{
+  "recommendations": [
+    "dbaeumer.vscode-eslint",
+    "esbenp.prettier-vscode",
+    "orta.vscode-jest"
+  ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000000..e36f697419
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,9 @@
+{
+  "editor.formatOnSave": true,
+  "typescript.tsdk": "node_modules/typescript/lib",
+  "prettier.prettierPath": "./node_modules/prettier",
+  "eslint.packageManager": "yarn",
+  "eslint.options": {
+    "reportUnusedDisableDirectives": true
+  }
+}
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000000..0aa593ea8c
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Foxglove Technologies Inc
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000..5928acadb0
--- /dev/null
+++ b/README.md
@@ -0,0 +1,20 @@
+# Message Capture file format
+
+Working specification: https://docs.google.com/document/d/1NaC2v0Qlx43661XkrlVncybYiuloDFRtwdyDKlPpS-A/edit
+
+## Developer quickstart
+
+### TypeScript
+
+Run lint/tests:
+
+```
+yarn workspace @foxglove/mcap lint
+yarn workspace @foxglove/mcap test
+```
+
+Read and validate an MCAP file:
+
+```
+yarn workspace @foxglove/mcap validate file.mcap
+```
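The reader this patch ships is driven incrementally: a caller appends bytes as they arrive, then drains whatever records became parseable. A minimal consumption sketch (hedged: it assumes `nextRecord()` returns `undefined` until more bytes are appended, matching the behavior exercised in typescript/src/McapReader.test.ts; the `@foxglove/mcap` import path and the `readMcapFile` helper name are illustrative):

```
import fs from "fs";

import { McapReader, McapRecord } from "@foxglove/mcap";

// Hypothetical consumer: stream an MCAP file from disk through the reader.
async function readMcapFile(filePath: string): Promise<void> {
  const reader = new McapReader();
  for await (const chunk of fs.createReadStream(filePath)) {
    // Feed raw bytes into the reader's growable buffer...
    reader.append(chunk as Uint8Array);
    // ...then drain every record that is now fully parseable.
    let record: McapRecord | undefined;
    while ((record = reader.nextRecord())) {
      if (record.type === "Message") {
        console.log(`message at ${record.timestamp} (${record.data.byteLength} bytes)`);
      }
    }
  }
  if (!reader.done()) {
    throw new Error("MCAP file ended before a footer record was read");
  }
}
```

As the tests below suggest, `done()` only reports true once the footer record has been consumed, so a truncated file is detectable.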
"devDependencies": { "@foxglove/crc": "0.0.3", - "@foxglove/rosmsg": "^3.0.0", - "@foxglove/rosmsg-serialization": "^1.2.3", - "@foxglove/rosmsg2-serialization": "^1.0.4", - "@types/lodash": "^4.14.176", + "@foxglove/eslint-plugin": "0.17.1", + "@foxglove/rosmsg": "3.0.0", + "@foxglove/rosmsg-serialization": "1.2.3", + "@foxglove/rosmsg2-serialization": "1.0.4", + "@foxglove/tsconfig": "1.1.0", + "@types/jest": "27.0.3", + "@types/lodash": "4.14.176", + "@types/node": "16.11.9", + "@typescript-eslint/eslint-plugin": "5.4.0", + "@typescript-eslint/parser": "5.4.0", "commander": "8.3.0", + "eslint": "7.32.0", + "eslint-config-prettier": "8.3.0", + "eslint-plugin-es": "4.1.0", + "eslint-plugin-filenames": "1.3.2", + "eslint-plugin-import": "2.25.3", + "eslint-plugin-jest": "25.2.4", + "eslint-plugin-prettier": "4.0.0", + "jest": "27.3.1", "lodash": "4.17.21", + "prettier": "2.4.1", + "ts-jest": "27.0.7", "ts-node": "10.4.0", - "typescript": "4.4.4" + "typescript": "4.4.4", + "wasm-lz4": "2.0.0" }, "dependencies": { - "eventemitter3": "4.0.7" + "eventemitter3": "4.0.7", + "tslib": "^2" } } diff --git a/typescript/scripts/validate.ts b/typescript/scripts/validate.ts index 24c8ff4c30..3c230de399 100644 --- a/typescript/scripts/validate.ts +++ b/typescript/scripts/validate.ts @@ -1,21 +1,15 @@ -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v2.0. If a copy of the MPL was not distributed with this -// file, You can obtain one at http://mozilla.org/MPL/2.0/ - +import { parse as parseMessageDefinition, RosMsgDefinition } from "@foxglove/rosmsg"; +import { LazyMessageReader as ROS1LazyMessageReader } from "@foxglove/rosmsg-serialization"; +import { MessageReader as ROS2MessageReader } from "@foxglove/rosmsg2-serialization"; import { program } from "commander"; import fs from "fs"; import { isEqual } from "lodash"; import { performance } from "perf_hooks"; import decompressLZ4 from "wasm-lz4"; -import { parse as parseMessageDefinition, RosMsgDefinition } from "@foxglove/rosmsg"; -import { LazyMessageReader as ROS1LazyMessageReader } from "@foxglove/rosmsg-serialization"; -import { MessageReader as ROS2MessageReader } from "@foxglove/rosmsg2-serialization"; - import { McapReader, McapRecord, ChannelInfo } from "../src"; function log(...data: unknown[]) { - // eslint-disable-next-line no-restricted-syntax console.log(...data); } @@ -74,7 +68,11 @@ async function validate( } else { throw new Error(`unsupported encoding ${record.encoding}`); } - channelInfoById.set(record.id, { info: record, messageDeserializer, parsedDefinitions }); + channelInfoById.set(record.id, { + info: record, + messageDeserializer, + parsedDefinitions, + }); break; } diff --git a/typescript/src/McapReader.test.ts b/typescript/src/McapReader.test.ts index 6b78a3b691..53a76113ef 100644 --- a/typescript/src/McapReader.test.ts +++ b/typescript/src/McapReader.test.ts @@ -1,7 +1,3 @@ -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v2.0. 
diff --git a/typescript/src/McapReader.test.ts b/typescript/src/McapReader.test.ts
index 6b78a3b691..53a76113ef 100644
--- a/typescript/src/McapReader.test.ts
+++ b/typescript/src/McapReader.test.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 import { crc32 } from "@foxglove/crc";
 
 import McapReader from "./McapReader";
@@ -209,7 +205,11 @@
        formatVersion,
      ]),
    );
-    expect(reader.nextRecord()).toEqual({ type: "Footer", indexPos: 0n, indexCrc: 0 });
+    expect(reader.nextRecord()).toEqual({
+      type: "Footer",
+      indexPos: 0n,
+      indexCrc: 0,
+    });
    expect(reader.done()).toBe(true);
  });
 
@@ -460,7 +460,11 @@
      schema: "stuff",
      data: new Uint8Array([1, 2, 3]).buffer,
    });
-    expect(reader.nextRecord()).toEqual({ type: "Footer", indexPos: 0n, indexCrc: 0 });
+    expect(reader.nextRecord()).toEqual({
+      type: "Footer",
+      indexPos: 0n,
+      indexCrc: 0,
+    });
    expect(reader.done()).toBe(true);
  });
 
@@ -504,7 +508,11 @@
      schema: "stuff",
      data: new Uint8Array([1, 2, 3]).buffer,
    });
-    expect(reader.nextRecord()).toEqual({ type: "Footer", indexPos: 0n, indexCrc: 0 });
+    expect(reader.nextRecord()).toEqual({
+      type: "Footer",
+      indexPos: 0n,
+      indexCrc: 0,
+    });
    expect(reader.done()).toBe(true);
  });
 
diff --git a/typescript/src/McapReader.ts b/typescript/src/McapReader.ts
index 78adec4e5a..4c946f1275 100644
--- a/typescript/src/McapReader.ts
+++ b/typescript/src/McapReader.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 import { crc32 } from "@foxglove/crc";
 
 import StreamBuffer from "./StreamBuffer";
diff --git a/typescript/src/StreamBuffer.test.ts b/typescript/src/StreamBuffer.test.ts
index ceefb6b303..c4a01d0726 100644
--- a/typescript/src/StreamBuffer.test.ts
+++ b/typescript/src/StreamBuffer.test.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 import StreamBuffer from "./StreamBuffer";
 
 function toArray(view: DataView) {
diff --git a/typescript/src/StreamBuffer.ts b/typescript/src/StreamBuffer.ts
index 8d30a1763c..6b894ef93c 100644
--- a/typescript/src/StreamBuffer.ts
+++ b/typescript/src/StreamBuffer.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 /**
  * A growable buffer for use when processing a stream of data.
  */
diff --git a/typescript/src/constants.ts b/typescript/src/constants.ts
index c5550bd1ce..84ae8da07c 100644
--- a/typescript/src/constants.ts
+++ b/typescript/src/constants.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 export enum RecordType {
   MIN = 0x01,
   CHANNEL_INFO = 0x01,
diff --git a/typescript/src/index.ts b/typescript/src/index.ts
index 6ecd1caa6d..4e02da5ef1 100644
--- a/typescript/src/index.ts
+++ b/typescript/src/index.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 export { default as McapReader } from "./McapReader";
 export * from "./parse";
 export * from "./types";
diff --git a/typescript/src/parse.ts b/typescript/src/parse.ts
index c788ec6070..db26cc75ca 100644
--- a/typescript/src/parse.ts
+++ b/typescript/src/parse.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 import { isEqual } from "lodash";
 
 import { MCAP_MAGIC, RecordType } from "./constants";
@@ -147,7 +143,10 @@ export function parseRecord(
       if (!isEqual(existingInfo, record)) {
         throw new Error(`differing channel infos for ${record.id}`);
       }
-      return { record: existingInfo, usedBytes: recordEndOffset - startOffset };
+      return {
+        record: existingInfo,
+        usedBytes: recordEndOffset - startOffset,
+      };
     } else {
       channelInfosById.set(id, record);
       return { record, usedBytes: recordEndOffset - startOffset };
@@ -170,7 +169,12 @@
     offset += 8;
 
     const data = view.buffer.slice(view.byteOffset + offset, view.byteOffset + recordEndOffset);
-    const record: McapRecord = { type: "Message", channelInfo, timestamp, data };
+    const record: McapRecord = {
+      type: "Message",
+      channelInfo,
+      timestamp,
+      data,
+    };
     return { record, usedBytes: recordEndOffset - startOffset };
   }
 
diff --git a/typescript/src/types.ts b/typescript/src/types.ts
index 704e8385d7..1c79d18a63 100644
--- a/typescript/src/types.ts
+++ b/typescript/src/types.ts
@@ -1,7 +1,3 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/
-
 export type McapMagic = {
   type: "Magic";
   formatVersion: 1;
diff --git a/typescript/typings/wasm-lz4.d.ts b/typescript/typings/wasm-lz4.d.ts
new file mode 100644
index 0000000000..c20f55014b
--- /dev/null
+++ b/typescript/typings/wasm-lz4.d.ts
@@ -0,0 +1,8 @@
+declare module "wasm-lz4" {
+  function decompress(buffer: Uint8Array, size: number): Buffer;
+  namespace decompress {
+    const isLoaded: Promise<boolean>;
+  }
+
+  export default decompress;
+}
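These ambient typings give the LZ4 WASM decompressor a typed surface. A short sketch of how a caller uses it, following the `await decompressLZ4.isLoaded` pattern in typescript/scripts/validate.ts (the `inflateChunk` helper and its arguments are illustrative):

```
import decompressLZ4 from "wasm-lz4";

// The WASM module initializes asynchronously; await isLoaded before first use.
async function inflateChunk(compressed: Uint8Array, uncompressedSize: number): Promise<Uint8Array> {
  await decompressLZ4.isLoaded;
  // Per the declaration above, the expected decompressed size is passed up front.
  return decompressLZ4(compressed, uncompressedSize);
}
```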
diff --git a/yarn.lock b/yarn.lock
new file mode 100644
index 0000000000..40c406076c
--- /dev/null
+++ b/yarn.lock
@@ -0,0 +1,3761 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1

[3,761 lines of autogenerated yarn.lock entries omitted]
"https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.17.5: + version "4.18.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.18.1.tgz#60d3920f25b6860eb917c6c7b185576f4d8b017f" + integrity sha512-8ScCzdpPwR2wQh8IT82CA2VgDwjHyqMovPBZSNH54+tm4Jk2pCuv90gmAdH6J84OCRWi0b4gMe6O6XPXuJnjgQ== + dependencies: + caniuse-lite "^1.0.30001280" + electron-to-chromium "^1.3.896" + escalade "^3.1.1" + node-releases "^2.0.1" + picocolors "^1.0.0" + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.1.tgz#250fd350cfd555d0d2160b1d51510eaf8326e86e" + integrity sha512-tVI4q5jjFV5CavAU8DXfza/TJcZutVKo/5Foskmsqcm0MsL91moHvwiGNnqaa2o6PF/7yT5ikDRcVcl8Rj6LCA== + +caniuse-lite@^1.0.30001280: + version "1.0.30001282" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001282.tgz#38c781ee0a90ccfe1fe7fefd00e43f5ffdcb96fd" + integrity 
sha512-YhF/hG6nqBEllymSIjLtR2iWDDnChvhnVJqp+vloyt2tEHFG1yBR+ac2B/rOw0qOK0m0lEXU2dv4E/sMk5P9Kg== + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +ci-info@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.2.0.tgz#2876cb948a498797b5236f0095bc057d0dca38b6" + integrity sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +combined-stream@^1.0.8: + version "1.0.8" + resolved 
"https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" + integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== + dependencies: + ms "2.1.2" + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.2.7: + version "3.2.7" + resolved 
"https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.3.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" + integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +diff-sequences@^27.0.6: + version "27.0.6" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.0.6.tgz#3305cb2e55a033924054695cc66019fd7f8e5723" + integrity sha512-ag6wfpBFyNXZ0p8pcuIDS//D8H062ZQJ3fzYxjpmeKjnz8W4pekL3AI8VohmyZmsWW2PWaHgjsmqR6L13101VQ== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +domexception@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +electron-to-chromium@^1.3.896: + version "1.3.903" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.903.tgz#e2d3c3809f4ef05fdbe5cc88969dfc94b1bd15b9" + integrity sha512-+PnYAyniRRTkNq56cqYDLq9LyklZYk0hqoDy9GpcU11H5QjRmFZVDbxtgHUMK/YzdNTcn1XWP5gb+hFlSCr20g== + +emittery@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + dependencies: + ansi-colors "^4.1.1" + +es-abstract@^1.19.0, es-abstract@^1.19.1: + version "1.19.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.19.1.tgz#d4885796876916959de78edaa0df456627115ec3" + integrity sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + get-intrinsic "^1.1.1" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-symbols "^1.0.2" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.1" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.1" + is-string "^1.0.7" + is-weakref "^1.0.1" + object-inspect "^1.11.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + string.prototype.trimend "^1.0.4" + string.prototype.trimstart "^1.0.4" + unbox-primitive "^1.0.1" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-config-prettier@8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.3.0.tgz#f7471b20b6fe8a9a9254cc684454202886a2dd7a" + integrity sha512-BgZuLUSeKzvlL/VUjx/Yb787VQ26RU3gGjA3iiFvdsp/2bMfVIWUVP7tjxtjS0e+HP409cPlPvNkQloz8C91ew== + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.1.tgz#b435001c9f8dd4ab7f6d0efcae4b9696d4c24b7c" + integrity sha512-fjoetBXQZq2tSTWZ9yWVl2KuFrTZZH3V+9iD1V1RfpDgxzJR+mPd/KZmMiA8gbPqdBzpNiEHOuT7IYEWxrH0zQ== + dependencies: + debug "^3.2.7" + find-up "^2.1.0" + pkg-dir "^2.0.0" + +eslint-plugin-es@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz#f0822f0c18a535a97c3e714e89f88586a7641ec9" + integrity sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ== + dependencies: + eslint-utils "^2.0.0" + regexpp "^3.0.0" + +eslint-plugin-filenames@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-filenames/-/eslint-plugin-filenames-1.3.2.tgz#7094f00d7aefdd6999e3ac19f72cea058e590cf7" + integrity sha512-tqxJTiEM5a0JmRCUYQmxw23vtTxrb2+a3Q2mMOPhFxvt7ZQQJmdiuMby9B/vUAuVMghyP7oET+nIf6EO6CBd/w== + dependencies: + lodash.camelcase "4.3.0" + lodash.kebabcase "4.1.1" + lodash.snakecase "4.1.1" + lodash.upperfirst "4.3.1" + +eslint-plugin-import@2.25.3: + version "2.25.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.25.3.tgz#a554b5f66e08fb4f6dc99221866e57cfff824766" + integrity sha512-RzAVbby+72IB3iOEL8clzPLzL3wpDrlwjsTBAQXgyp5SeTqqY+0bFubwuo+y/HLhNZcXV4XqTBO4LGsfyHIDXg== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.1" + has "^1.0.3" + is-core-module "^2.8.0" + is-glob "^4.0.3" + minimatch "^3.0.4" + object.values "^1.1.5" + resolve "^1.20.0" + tsconfig-paths "^3.11.0" + +eslint-plugin-jest@25.2.4: + version "25.2.4" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.2.4.tgz#bb9f6a0bd1fd524ffb0b8b7a159cd70a58a1a793" + integrity sha512-HRyinpgmEdkVr7pNPaYPHCoGqEzpgk79X8pg/xCeoAdurbyQjntJQ4pTzHl7BiVEBlam/F1Qsn+Dk0HtJO7Aaw== + dependencies: + "@typescript-eslint/experimental-utils" "^5.0.0" + +eslint-plugin-prettier@4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.0.0.tgz#8b99d1e4b8b24a762472b4567992023619cb98e0" + integrity sha512-98MqmCJ7vJodoQK359bqQWaxOE0CS8paAz/GgjaZLyex4TTk3g9HugoO89EqWCrFiOqn9EVvcoo7gZzONCWVwQ== + dependencies: + prettier-linter-helpers "^1.0.0" + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^2.0.0, eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.1.0.tgz#eee4acea891814cda67a7d8812d9647dd0179af2" + integrity sha512-yWJFpu4DtjsWKkt5GeNBBuZMlNcYVs6vRCLoCVEJrTjaSB6LC98gFipNK/erM2Heg/E8mIK+hXG/pJMLK+eRZA== + +eslint@7.32.0: + version "7.32.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d" + integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA== + dependencies: + "@babel/code-frame" "7.12.11" + "@eslint/eslintrc" "^0.4.3" + "@humanwhocodes/config-array" "^0.5.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + escape-string-regexp "^4.0.0" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.1" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.1.2" + globals "^13.6.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^6.0.9" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^7.3.0, espree@^7.3.1: + version "7.3.1" + resolved 
"https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +eventemitter3@4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= + +expect@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/expect/-/expect-27.3.1.tgz#d0f170b1f5c8a2009bab0beffd4bb94f043e38e7" + integrity sha512-MrNXV2sL9iDRebWPGOGFdPQRl2eDQNu/uhxIMShjjx74T6kC6jFIkmQ6OqXDtevjGUkyB2IT56RzDBqXf/QPCg== + dependencies: + "@jest/types" "^27.2.5" + ansi-styles "^5.0.0" + jest-get-type "^27.3.1" + jest-matcher-utils "^27.3.1" + jest-message-util "^27.3.1" + jest-regex-util "^27.0.6" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity 
sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-diff@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" + integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== + +fast-glob@^3.1.1: + version "3.2.7" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1" + integrity sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +fb-watchman@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" + integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + dependencies: + bser "2.1.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.4" + resolved 
"https://registry.yarnpkg.com/flatted/-/flatted-3.2.4.tgz#28d9969ea90661b5134259f312ab6aa7929ac5e2" + integrity sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw== + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2: + version "5.1.2" + resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4: + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.6.0, globals@^13.9.0: + version "13.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.12.0.tgz#4d733760304230a0082ed96e21e5c565f898089e" + integrity sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.4: + version "11.0.4" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" + integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.1.1" + ignore "^5.1.4" + merge2 "^1.3.0" + slash "^3.0.0" + +graceful-fs@^4.2.4: + version "4.2.8" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" + integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== + +has-bigints@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" + integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbols@^1.0.1, has-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" + integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + 
+html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +https-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +ignore@^5.1.4, ignore@^5.1.8: + version "5.1.9" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.9.tgz#9ec1a5cbe8e1446ec60d4420060d43aa6e7382fb" + integrity sha512-2zeMQpbKz5dhZ9IwL0gbxSW5w0NK/MSAMtNuhgIHEPmaU3vPdKPL0UdvUCXs5SS4JAwsBxysK5sFMW8ocFiVjQ== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.3.tgz#4d51c2c495ca9393da259ec66b62e022920211e0" + integrity sha512-bE9iaUY3CXH8Cwfan/abDKAxe1KGT9kyGsBPqf6DMK/z0a2OzAsrukeYNgIH6cH5Xr452jb1TUL8rSfCLjZ9uA== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + 
version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-core-module@^2.2.0, is-core-module@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.0.tgz#0321336c3d0925e497fd97f5d95cb114a5ccd548" + integrity sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-negative-zero@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" + integrity 
sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== + +is-number-object@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.6.tgz#6a7aaf838c7f0686a50b4553f7e54a96494e89f0" + integrity sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-shared-array-buffer@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz#97b0c85fbdacb59c9c446fe653b82cf2b5b7cfe6" + integrity sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is-weakref@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.1.tgz#842dba4ec17fa9ac9850df2d6efbc1737274f2a2" + integrity sha512-b2jKc2pQZjaeFYWEf7ScFj+Be1I+PXmlu572Q8coTXZ+LD/QQZ7ShPMst8h16riVgyXTQwUsFEl74mDvc/3MHQ== + dependencies: + call-bind "^1.0.0" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity 
sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" + integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== + dependencies: + "@babel/core" "^7.7.5" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.0.0" + semver "^6.3.0" + +istanbul-lib-instrument@^5.0.4: + version "5.1.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz#7b49198b657b27a730b8e9cb601f1e1bff24c59a" + integrity sha512-czwUz525rkOFDJxfKK6mYfIs9zBKILyrZQxjz3ABhjQXhbhFsSbo1HW/BFcsDnfJYJWA6thRR5/TUY2qs5W99Q== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.0.2: + version "3.0.5" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.5.tgz#a2580107e71279ea6d661ddede929ffc6d693384" + integrity sha512-5+19PlhnGabNWB7kOFnuxT8H3T/iIyQzIbQMxXsURmmvKg86P2sbkrGOT77VnHw0Qr0gc2XzRaRfMZYYbSQCJQ== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jest-changed-files@^27.3.0: + version "27.3.0" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.3.0.tgz#22a02cc2b34583fc66e443171dc271c0529d263c" + integrity sha512-9DJs9garMHv4RhylUMZgbdCJ3+jHSkpL9aaVKp13xtXAD80qLTLrqcDZL1PHA9dYA0bCI86Nv2BhkLpLhrBcPg== + dependencies: + "@jest/types" "^27.2.5" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.3.1.tgz#1679e74387cbbf0c6a8b42de963250a6469e0797" + integrity sha512-v1dsM9II6gvXokgqq6Yh2jHCpfg7ZqV4jWY66u7npz24JnhP3NHxI0sKT7+ZMQ7IrOWHYAaeEllOySbDbWsiXw== + dependencies: + "@jest/environment" "^27.3.1" + "@jest/test-result" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + expect "^27.3.1" + is-generator-fn "^2.0.0" + jest-each "^27.3.1" + jest-matcher-utils "^27.3.1" + jest-message-util "^27.3.1" + jest-runtime "^27.3.1" + jest-snapshot "^27.3.1" + jest-util "^27.3.1" + pretty-format "^27.3.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.3.1.tgz#b576f9d146ba6643ce0a162d782b40152b6b1d16" + integrity sha512-WHnCqpfK+6EvT62me6WVs8NhtbjAS4/6vZJnk7/2+oOr50cwAzG4Wxt6RXX0hu6m1169ZGMlhYYUNeKBXCph/Q== + dependencies: + "@jest/core" "^27.3.1" + "@jest/test-result" "^27.3.1" + 
"@jest/types" "^27.2.5" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.4" + import-local "^3.0.2" + jest-config "^27.3.1" + jest-util "^27.3.1" + jest-validate "^27.3.1" + prompts "^2.0.1" + yargs "^16.2.0" + +jest-config@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.3.1.tgz#cb3b7f6aaa8c0a7daad4f2b9573899ca7e09bbad" + integrity sha512-KY8xOIbIACZ/vdYCKSopL44I0xboxC751IX+DXL2+Wx6DKNycyEfV3rryC3BPm5Uq/BBqDoMrKuqLEUNJmMKKg== + dependencies: + "@babel/core" "^7.1.0" + "@jest/test-sequencer" "^27.3.1" + "@jest/types" "^27.2.5" + babel-jest "^27.3.1" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.4" + jest-circus "^27.3.1" + jest-environment-jsdom "^27.3.1" + jest-environment-node "^27.3.1" + jest-get-type "^27.3.1" + jest-jasmine2 "^27.3.1" + jest-regex-util "^27.0.6" + jest-resolve "^27.3.1" + jest-runner "^27.3.1" + jest-util "^27.3.1" + jest-validate "^27.3.1" + micromatch "^4.0.4" + pretty-format "^27.3.1" + +jest-diff@^27.0.0, jest-diff@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.3.1.tgz#d2775fea15411f5f5aeda2a5e02c2f36440f6d55" + integrity sha512-PCeuAH4AWUo2O5+ksW4pL9v5xJAcIKPUPfIhZBcG1RKv/0+dvaWTQK1Nrau8d67dp65fOqbeMdoil+6PedyEPQ== + dependencies: + chalk "^4.0.0" + diff-sequences "^27.0.6" + jest-get-type "^27.3.1" + pretty-format "^27.3.1" + +jest-docblock@^27.0.6: + version "27.0.6" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.0.6.tgz#cc78266acf7fe693ca462cbbda0ea4e639e4e5f3" + integrity sha512-Fid6dPcjwepTFraz0YxIMCi7dejjJ/KL9FBjPYhBp4Sv1Y9PdhImlKZqYU555BlN4TQKaTc+F2Av1z+anVyGkA== + dependencies: + detect-newline "^3.0.0" + +jest-each@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.3.1.tgz#14c56bb4f18dd18dc6bdd853919b5f16a17761ff" + integrity sha512-E4SwfzKJWYcvOYCjOxhZcxwL+AY0uFMvdCOwvzgutJiaiodFjkxQQDxHm8FQBeTqDnSmKsQWn7ldMRzTn2zJaQ== + dependencies: + "@jest/types" "^27.2.5" + chalk "^4.0.0" + jest-get-type "^27.3.1" + jest-util "^27.3.1" + pretty-format "^27.3.1" + +jest-environment-jsdom@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.3.1.tgz#63ac36d68f7a9303494df783494856222b57f73e" + integrity sha512-3MOy8qMzIkQlfb3W1TfrD7uZHj+xx8Olix5vMENkj5djPmRqndMaXtpnaZkxmxM+Qc3lo+yVzJjzuXbCcZjAlg== + dependencies: + "@jest/environment" "^27.3.1" + "@jest/fake-timers" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/node" "*" + jest-mock "^27.3.0" + jest-util "^27.3.1" + jsdom "^16.6.0" + +jest-environment-node@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.3.1.tgz#af7d0eed04edafb740311b303f3fe7c8c27014bb" + integrity sha512-T89F/FgkE8waqrTSA7/ydMkcc52uYPgZZ6q8OaZgyiZkJb5QNNCF6oPZjH9IfPFfcc9uBWh1574N0kY0pSvTXw== + dependencies: + "@jest/environment" "^27.3.1" + "@jest/fake-timers" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/node" "*" + jest-mock "^27.3.0" + jest-util "^27.3.1" + +jest-get-type@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.3.1.tgz#a8a2b0a12b50169773099eee60a0e6dd11423eff" + integrity sha512-+Ilqi8hgHSAdhlQ3s12CAVNd8H96ZkQBfYoXmArzZnOfAtVAJEiPDBirjByEblvG/4LPJmkL+nBqPO3A1YJAEg== + +jest-haste-map@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.3.1.tgz#7656fbd64bf48bda904e759fc9d93e2c807353ee" + 
integrity sha512-lYfNZIzwPccDJZIyk9Iz5iQMM/MH56NIIcGj7AFU1YyA4ewWFBl8z+YPJuSCRML/ee2cCt2y3W4K3VXPT6Nhzg== + dependencies: + "@jest/types" "^27.2.5" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.4" + jest-regex-util "^27.0.6" + jest-serializer "^27.0.6" + jest-util "^27.3.1" + jest-worker "^27.3.1" + micromatch "^4.0.4" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.3.2" + +jest-jasmine2@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.3.1.tgz#df6d3d07c7dafc344feb43a0072a6f09458d32b0" + integrity sha512-WK11ZUetDQaC09w4/j7o4FZDUIp+4iYWH/Lik34Pv7ukL+DuXFGdnmmi7dT58J2ZYKFB5r13GyE0z3NPeyJmsg== + dependencies: + "@babel/traverse" "^7.1.0" + "@jest/environment" "^27.3.1" + "@jest/source-map" "^27.0.6" + "@jest/test-result" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^27.3.1" + is-generator-fn "^2.0.0" + jest-each "^27.3.1" + jest-matcher-utils "^27.3.1" + jest-message-util "^27.3.1" + jest-runtime "^27.3.1" + jest-snapshot "^27.3.1" + jest-util "^27.3.1" + pretty-format "^27.3.1" + throat "^6.0.1" + +jest-leak-detector@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.3.1.tgz#7fb632c2992ef707a1e73286e1e704f9cc1772b2" + integrity sha512-78QstU9tXbaHzwlRlKmTpjP9k4Pvre5l0r8Spo4SbFFVy/4Abg9I6ZjHwjg2QyKEAMg020XcjP+UgLZIY50yEg== + dependencies: + jest-get-type "^27.3.1" + pretty-format "^27.3.1" + +jest-matcher-utils@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.3.1.tgz#257ad61e54a6d4044e080d85dbdc4a08811e9c1c" + integrity sha512-hX8N7zXS4k+8bC1Aj0OWpGb7D3gIXxYvPNK1inP5xvE4ztbz3rc4AkI6jGVaerepBnfWB17FL5lWFJT3s7qo8w== + dependencies: + chalk "^4.0.0" + jest-diff "^27.3.1" + jest-get-type "^27.3.1" + pretty-format "^27.3.1" + +jest-message-util@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.3.1.tgz#f7c25688ad3410ab10bcb862bcfe3152345c6436" + integrity sha512-bh3JEmxsTZ/9rTm0jQrPElbY2+y48Rw2t47uMfByNyUVR+OfPh4anuyKsGqsNkXk/TI4JbLRZx+7p7Hdt6q1yg== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.2.5" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.4" + micromatch "^4.0.4" + pretty-format "^27.3.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^27.3.0: + version "27.3.0" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.3.0.tgz#ddf0ec3cc3e68c8ccd489bef4d1f525571a1b867" + integrity sha512-ziZiLk0elZOQjD08bLkegBzv5hCABu/c8Ytx45nJKkysQwGaonvmTxwjLqEA4qGdasq9o2I8/HtdGMNnVsMTGw== + dependencies: + "@jest/types" "^27.2.5" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^27.0.6: + version "27.0.6" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.0.6.tgz#02e112082935ae949ce5d13b2675db3d8c87d9c5" + integrity sha512-SUhPzBsGa1IKm8hx2F4NfTGGp+r7BXJ4CulsZ1k2kI+mGLG+lxGrs76veN2LF/aUdGosJBzKgXmNCw+BzFqBDQ== + +jest-resolve-dependencies@^27.3.1: + version "27.3.1" + resolved 
"https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.3.1.tgz#85b99bdbdfa46e2c81c6228fc4c91076f624f6e2" + integrity sha512-X7iLzY8pCiYOnvYo2YrK3P9oSE8/3N2f4pUZMJ8IUcZnT81vlSonya1KTO9ZfKGuC+svE6FHK/XOb8SsoRUV1A== + dependencies: + "@jest/types" "^27.2.5" + jest-regex-util "^27.0.6" + jest-snapshot "^27.3.1" + +jest-resolve@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.3.1.tgz#0e5542172a1aa0270be6f66a65888647bdd74a3e" + integrity sha512-Dfzt25CFSPo3Y3GCbxynRBZzxq9AdyNN+x/v2IqYx6KVT5Z6me2Z/PsSGFSv3cOSUZqJ9pHxilao/I/m9FouLw== + dependencies: + "@jest/types" "^27.2.5" + chalk "^4.0.0" + graceful-fs "^4.2.4" + jest-haste-map "^27.3.1" + jest-pnp-resolver "^1.2.2" + jest-util "^27.3.1" + jest-validate "^27.3.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.3.1.tgz#1d594dcbf3bd8600a7e839e790384559eaf96e3e" + integrity sha512-r4W6kBn6sPr3TBwQNmqE94mPlYVn7fLBseeJfo4E2uCTmAyDFm2O5DYAQAFP7Q3YfiA/bMwg8TVsciP7k0xOww== + dependencies: + "@jest/console" "^27.3.1" + "@jest/environment" "^27.3.1" + "@jest/test-result" "^27.3.1" + "@jest/transform" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-docblock "^27.0.6" + jest-environment-jsdom "^27.3.1" + jest-environment-node "^27.3.1" + jest-haste-map "^27.3.1" + jest-leak-detector "^27.3.1" + jest-message-util "^27.3.1" + jest-resolve "^27.3.1" + jest-runtime "^27.3.1" + jest-util "^27.3.1" + jest-worker "^27.3.1" + source-map-support "^0.5.6" + throat "^6.0.1" + +jest-runtime@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.3.1.tgz#80fa32eb85fe5af575865ddf379874777ee993d7" + integrity sha512-qtO6VxPbS8umqhEDpjA4pqTkKQ1Hy4ZSi9mDVeE9Za7LKBo2LdW2jmT+Iod3XFaJqINikZQsn2wEi0j9wPRbLg== + dependencies: + "@jest/console" "^27.3.1" + "@jest/environment" "^27.3.1" + "@jest/globals" "^27.3.1" + "@jest/source-map" "^27.0.6" + "@jest/test-result" "^27.3.1" + "@jest/transform" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.4" + jest-haste-map "^27.3.1" + jest-message-util "^27.3.1" + jest-mock "^27.3.0" + jest-regex-util "^27.0.6" + jest-resolve "^27.3.1" + jest-snapshot "^27.3.1" + jest-util "^27.3.1" + jest-validate "^27.3.1" + slash "^3.0.0" + strip-bom "^4.0.0" + yargs "^16.2.0" + +jest-serializer@^27.0.6: + version "27.0.6" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.0.6.tgz#93a6c74e0132b81a2d54623251c46c498bb5bec1" + integrity sha512-PtGdVK9EGC7dsaziskfqaAPib6wTViY3G8E5wz9tLVPhHyiDNTZn/xjZ4khAw+09QkoOVpn7vF5nPSN6dtBexA== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.4" + +jest-snapshot@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.3.1.tgz#1da5c0712a252d70917d46c037054f5918c49ee4" + integrity sha512-APZyBvSgQgOT0XumwfFu7X3G5elj6TGhCBLbBdn3R1IzYustPGPE38F51dBWMQ8hRXa9je0vAdeVDtqHLvB6lg== + dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/parser" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.0.0" + "@jest/transform" "^27.3.1" + "@jest/types" "^27.2.5" + 
"@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^27.3.1" + graceful-fs "^4.2.4" + jest-diff "^27.3.1" + jest-get-type "^27.3.1" + jest-haste-map "^27.3.1" + jest-matcher-utils "^27.3.1" + jest-message-util "^27.3.1" + jest-resolve "^27.3.1" + jest-util "^27.3.1" + natural-compare "^1.4.0" + pretty-format "^27.3.1" + semver "^7.3.2" + +jest-util@^27.0.0, jest-util@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.3.1.tgz#a58cdc7b6c8a560caac9ed6bdfc4e4ff23f80429" + integrity sha512-8fg+ifEH3GDryLQf/eKZck1DEs2YuVPBCMOaHQxVVLmQwl/CDhWzrvChTX4efLZxGrw+AA0mSXv78cyytBt/uw== + dependencies: + "@jest/types" "^27.2.5" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.4" + picomatch "^2.2.3" + +jest-validate@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.3.1.tgz#3a395d61a19cd13ae9054af8cdaf299116ef8a24" + integrity sha512-3H0XCHDFLA9uDII67Bwi1Vy7AqwA5HqEEjyy934lgVhtJ3eisw6ShOF1MDmRPspyikef5MyExvIm0/TuLzZ86Q== + dependencies: + "@jest/types" "^27.2.5" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.3.1" + leven "^3.1.0" + pretty-format "^27.3.1" + +jest-watcher@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.3.1.tgz#ba5e0bc6aa843612b54ddb7f009d1cbff7e05f3e" + integrity sha512-9/xbV6chABsGHWh9yPaAGYVVKurWoP3ZMCv6h+O1v9/+pkOroigs6WzZ0e9gLP/njokUwM7yQhr01LKJVMkaZA== + dependencies: + "@jest/test-result" "^27.3.1" + "@jest/types" "^27.2.5" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.3.1" + string-length "^4.0.1" + +jest-worker@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.3.1.tgz#0def7feae5b8042be38479799aeb7b5facac24b2" + integrity sha512-ks3WCzsiZaOPJl/oMsDjaf0TRiSv7ctNgs0FqRr2nARsovz6AWWy4oLElwcquGSz692DzgZQrCLScPNs5YlC4g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/jest/-/jest-27.3.1.tgz#b5bab64e8f56b6f7e275ba1836898b0d9f1e5c8a" + integrity sha512-U2AX0AgQGd5EzMsiZpYt8HyZ+nSVIh5ujQ9CPp9EQZJMjXIiSZpJNweZl0swatKRoqHWgGKM3zaSwm4Zaz87ng== + dependencies: + "@jest/core" "^27.3.1" + import-local "^3.0.2" + jest-cli "^27.3.1" + +js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsdom@^16.6.0: + version "16.7.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent 
"^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +json5@2.x, json5@^2.1.2: + version "2.2.0" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3" + integrity sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA== + dependencies: + minimist "^1.2.5" + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash.camelcase@4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + integrity sha1-soqmKIorn8ZRA1x3EfZathkDMaY= + +lodash.kebabcase@4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" + integrity sha1-hImxyw0p/4gZXM7KRI/21swpXDY= + +lodash.memoize@4.x: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.snakecase@4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" + integrity sha1-OdcUo1NXFHg3rv1ktdy7Fr7Nj40= + +lodash.truncate@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" + integrity sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM= + +lodash.upperfirst@4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz#1365edf431480481ef0d1c68957a5ed99d49f7ce" + integrity sha1-E2Xt9DFIBIHvDRxolXpe2Z1J984= + +lodash@4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +make-dir@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +make-error@1.x, make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +md5-typescript@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/md5-typescript/-/md5-typescript-1.0.5.tgz#68c0b24dff8e5d3162e498fa9893b63be72e038f" + integrity sha1-aMCyTf+OXTFi5Jj6mJO2O+cuA48= + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity 
sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" + integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== + dependencies: + braces "^3.0.1" + picomatch "^2.2.3" + +mime-db@1.51.0: + version "1.51.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" + integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g== + +mime-types@^2.1.12: + version "2.1.34" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24" + integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A== + dependencies: + mime-db "1.51.0" + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= + +node-modules-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" + integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= + +node-releases@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.1.tgz#3d1d395f204f1f2f29a54358b9fb678765ad2fc5" + 
integrity sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA== + +normalize-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nwsapi@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" + integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== + +object-inspect@^1.11.0, object-inspect@^1.9.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.11.0.tgz#9dceb146cedd4148a0d9e51ab88d34cf509922b1" + integrity sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg== + +object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + 
prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse5@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-type@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972" + integrity sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw== + +pirates@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" + integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== + dependencies: + node-modules-regexp "^1.0.0" + +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= + dependencies: + find-up "^2.1.0" + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + +prettier-linter-helpers@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" + integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== + dependencies: + fast-diff "^1.1.2" + +prettier@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.4.1.tgz#671e11c89c14a4cfc876ce564106c4a6726c9f5c" + integrity sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA== + +pretty-format@^27.0.0, pretty-format@^27.3.1: + version "27.3.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.3.1.tgz#7e9486365ccdd4a502061fa761d3ab9ca1b78df5" + integrity sha512-DR/c+pvFc52nLimLROYjnXPtolawm+uWDxr4FjuLDLUn+ktWnSN851KoHwHzzqq6rfCOjkzN8FLgDrSub6UDuA== + dependencies: + "@jest/types" "^27.2.5" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity 
sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +psl@^1.1.33: + version "1.8.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +react-is@^17.0.1: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +regexpp@^3.0.0, regexpp@^3.1.0, regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.20.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" + integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +reusify@^1.0.4: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +saxes@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +semver@7.x, semver@^7.2.1, semver@^7.3.2, semver@^7.3.5: + version "7.3.5" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" + integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== + dependencies: + lru-cache "^6.0.0" + +semver@^6.0.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.6" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.6.tgz#24e630c4b0f03fea446a2bd299e62b4a6ca8d0af" + integrity sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +source-map-support@^0.5.6: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.5.0: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.7.3: + version "0.7.3" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" + integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +stack-utils@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.trimend@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" + integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== + dependencies: + call-bind 
"^1.0.2" + define-properties "^1.1.3" + +string.prototype.trimstart@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" + integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +table@^6.0.9: + version "6.7.3" + resolved "https://registry.yarnpkg.com/table/-/table-6.7.3.tgz#255388439715a738391bd2ee4cbca89a4d05a9b7" + integrity 
sha512-5DkIxeA7XERBqMwJq0aHZOdMadBx4e6eDoFRuyT5VR82J0Ycg2DwM6GfA/EQAhJ+toRTaS1lIdSQCqgrmhPnlw== + dependencies: + ajv "^8.0.1" + lodash.truncate "^4.4.2" + slice-ansi "^4.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +throat@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tough-cookie@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" + integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.1.2" + +tr46@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +ts-jest@27.0.7: + version "27.0.7" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-27.0.7.tgz#fb7c8c8cb5526ab371bc1b23d06e745652cca2d0" + integrity sha512-O41shibMqzdafpuP+CkrOL7ykbmLh+FqQrXEmV9CydQ5JBk0Sj0uAEF5TNNe94fZWKm3yYvWa/IbyV4Yg1zK2Q== + dependencies: + bs-logger "0.x" + fast-json-stable-stringify "2.x" + jest-util "^27.0.0" + json5 "2.x" + lodash.memoize "4.x" + make-error "1.x" + semver "7.x" + yargs-parser "20.x" + +ts-node@10.4.0: + version "10.4.0" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.4.0.tgz#680f88945885f4e6cf450e7f0d6223dd404895f7" + integrity sha512-g0FlPvvCXSIO1JDF6S232P5jPYqBkRL9qly81ZgAOSU7rwI0stphCgd2kLiCrU9DjQCrJMWEqcNSjQL02s6d8A== + dependencies: + "@cspotcode/source-map-support" "0.7.0" 
+ "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + yn "3.1.1" + +tsconfig-paths@^3.11.0: + version "3.11.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz#954c1fe973da6339c78e06b03ce2e48810b65f36" + integrity sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.0" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2: + version "2.3.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" + integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== + +tsutils@^3.21.0: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +typescript@4.4.4: + version "4.4.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c" + integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA== + +unbox-primitive@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" 
+ integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== + dependencies: + function-bind "^1.1.1" + has-bigints "^1.0.1" + has-symbols "^1.0.2" + which-boxed-primitive "^1.0.2" + +universalify@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +v8-to-istanbul@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.0.tgz#0aeb763894f1a0a1676adf8a8b7612a38902446c" + integrity sha512-/PRhfd8aTNp9Ggr62HPzXg2XasNFGy5PBt0Rp04du7/8GNNSgxFL6WBTkgMKSL9bFjH+8kKEG3f37FmxiTqUUA== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +wasm-lz4@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/wasm-lz4/-/wasm-lz4-2.0.0.tgz#f0e8cab698c8037f7259a66c8fc698ad87e01933" + integrity sha512-eXm4WpHRuS2O1yOOLGTyDDit+gBJ5wV6/VhR4b0p289pQDABu03eBfGvzua5YJN6teiIskH3eE4SyQTG4/HV9w== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + 
+whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.5" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.5.tgz#8b4bc4af518cfabd0473ae4f99144287b33eb881" + integrity sha512-BAkMFcAzl8as1G/hArkxOxq3G7pjUqQ3gzYbLL0/5zNkph70e+lCoxBGnm6AW1+/aiNeV4fnKqZ8m4GZewmH2w== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity 
sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yargs-parser@20.x, yargs-parser@^20.2.2: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs@^16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== From 14f91755b1b786747d215ac30a4a064e98c7dcde Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Fri, 19 Nov 2021 11:46:41 -0800 Subject: [PATCH 002/635] Fix GitHub actions workflow (#2) --- .github/{ => workflows}/ci.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/{ => workflows}/ci.yml (100%) diff --git a/.github/ci.yml b/.github/workflows/ci.yml similarity index 100% rename from .github/ci.yml rename to .github/workflows/ci.yml From afc75f4ae08fb9908fde5c2a1d141dbfcd193ccd Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Fri, 19 Nov 2021 11:47:06 -0800 Subject: [PATCH 003/635] Fix typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5928acadb0..908f73e777 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Working specification: https://docs.google.com/document/d/1NaC2v0Qlx43661XkrlVnc ## Developer quickstart -### Typescript +### TypeScript Run lint/tests: From fa9516dba755368741edb4922867144c585151a7 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Fri, 19 Nov 2021 11:51:27 -0800 Subject: [PATCH 004/635] Add @foxglove/crc to runtime dependencies; remove eventemitter --- typescript/package.json | 3 +-- yarn.lock | 7 +------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/typescript/package.json b/typescript/package.json index 283c84f178..31fbe86bbf 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -25,7 +25,6 @@ "validate": "ts-node --files --project tsconfig.cjs.json scripts/validate.ts" }, "devDependencies": { - "@foxglove/crc": "0.0.3", "@foxglove/eslint-plugin": "0.17.1", "@foxglove/rosmsg": "3.0.0", "@foxglove/rosmsg-serialization": "1.2.3", @@ -53,7 +52,7 @@ "wasm-lz4": "2.0.0" }, "dependencies": { - "eventemitter3": "4.0.7", + "@foxglove/crc": "^0.0.3", "tslib": "^2" } } diff --git a/yarn.lock b/yarn.lock index 40c406076c..245d1c4d95 100644 --- a/yarn.lock +++ b/yarn.lock @@ -341,7 +341,7 @@ resolved "https://registry.yarnpkg.com/@foxglove/cdr/-/cdr-1.2.0.tgz#b8c69b37299ae47bb3ffc0f0eef70d99cf9a0a4b" integrity sha512-KuPGicuOPA4U5Y+a3YfHe6prQhbDlI6zxaIYhhKhF+2/qTophkOemJEqbJcAOfy1rim7+O5n2S9ATPK63C9GPw== -"@foxglove/crc@0.0.3": +"@foxglove/crc@^0.0.3": 
version "0.0.3" resolved "https://registry.yarnpkg.com/@foxglove/crc/-/crc-0.0.3.tgz#04cd8816454e14f1ec48de17c949199b4b3ec9c2" integrity sha512-DjIZsnL3CyP/yQ/vUYA9cjrD0a/8YXejI5ZmsaOiT16cLfZcTwaCxIN01/ys4jsy+dZCQ/9DnWFn7AEFbiMDaA== @@ -1645,11 +1645,6 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -eventemitter3@4.0.7: - version "4.0.7" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" From 03f1dc6a2d28772e2ef19a710f1002a42d845eb1 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Fri, 19 Nov 2021 11:57:57 -0800 Subject: [PATCH 005/635] Fix module/main settings and tsconfig outDir --- typescript/package.json | 6 ++++-- typescript/tsconfig.json | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/typescript/package.json b/typescript/package.json index 31fbe86bbf..e1d4a02d42 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -12,13 +12,15 @@ "email": "support@foxglove.dev" }, "homepage": "https://foxglove.dev/", - "main": "./src/index.ts", + "module": "dist/esm/src/index.js", + "main": "dist/cjs/src/index.js", + "typings": "dist/esm/src/index.d.ts", "files": [ "dist", "src" ], "scripts": { - "prepack": "tsc -b", + "prepack": "tsc -b tsconfig.json tsconfig.cjs.json", "lint:ci": "eslint --report-unused-disable-directives .", "lint": "eslint --report-unused-disable-directives --fix .", "test": "jest", diff --git a/typescript/tsconfig.json b/typescript/tsconfig.json index 03bd3c7085..c9f6292197 100644 --- a/typescript/tsconfig.json +++ b/typescript/tsconfig.json @@ -3,7 +3,7 @@ "include": ["./**/*"], "compilerOptions": { "rootDir": ".", - "outDir": "./dist", + "outDir": "./dist/esm", "lib": ["es2020", "dom"] } } From 73092e6dec817fe0ff1da44eeae48b691408ef43 Mon Sep 17 00:00:00 2001 From: Roman Shtylman Date: Thu, 2 Dec 2021 16:13:17 -0800 Subject: [PATCH 006/635] Add bag2proto script to typescript folder (#3) Convert a ros1 bag file to a protobuf mcap file. 
--- typescript/package.json | 5 +- typescript/scripts/bag2proto.ts | 226 +++++++++++++++++++++++++++++ typescript/src/McapWriter.ts | 151 +++++++++++++++++++ typescript/src/index.ts | 1 + typescript/typings/protobufjs.d.ts | 11 ++ yarn.lock | 104 ++++++++++++- 6 files changed, 496 insertions(+), 2 deletions(-) create mode 100644 typescript/scripts/bag2proto.ts create mode 100644 typescript/src/McapWriter.ts create mode 100644 typescript/typings/protobufjs.d.ts diff --git a/typescript/package.json b/typescript/package.json index e1d4a02d42..596dfa38dc 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -24,10 +24,12 @@ "lint:ci": "eslint --report-unused-disable-directives .", "lint": "eslint --report-unused-disable-directives --fix .", "test": "jest", - "validate": "ts-node --files --project tsconfig.cjs.json scripts/validate.ts" + "validate": "ts-node --files --project tsconfig.cjs.json scripts/validate.ts", + "bag2proto": "ts-node --files --project tsconfig.cjs.json scripts/bag2proto.ts" }, "devDependencies": { "@foxglove/eslint-plugin": "0.17.1", + "@foxglove/rosbag": "0.1.2", "@foxglove/rosmsg": "3.0.0", "@foxglove/rosmsg-serialization": "1.2.3", "@foxglove/rosmsg2-serialization": "1.0.4", @@ -48,6 +50,7 @@ "jest": "27.3.1", "lodash": "4.17.21", "prettier": "2.4.1", + "protobufjs": "6.11.2", "ts-jest": "27.0.7", "ts-node": "10.4.0", "typescript": "4.4.4", diff --git a/typescript/scripts/bag2proto.ts b/typescript/scripts/bag2proto.ts new file mode 100644 index 0000000000..ce7d4327f9 --- /dev/null +++ b/typescript/scripts/bag2proto.ts @@ -0,0 +1,226 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/ + +// convert a ROS1 .bag file to an mcap proto file + +import { Bag } from "@foxglove/rosbag"; +import { FileReader } from "@foxglove/rosbag/node"; +import { parse as parseMessageDefinition } from "@foxglove/rosmsg"; +import { program } from "commander"; +import protobufjs from "protobufjs"; +import descriptor from "protobufjs/ext/descriptor"; +import decompressLZ4 from "wasm-lz4"; + +import { McapWriter, ChannelInfo, Message } from "../src"; + +const builtinSrc = ` +syntax = "proto3"; + +package ros; + +message Time { + fixed32 sec = 1; + fixed32 nsec = 2; +} + +message Duration { + fixed32 sec = 1; + fixed32 nsec = 2; + } +`; + +const BUILTIN_TYPE_MAP = new Map([ + ["time", "ros.Time"], + ["duration", "ros.Duration"], + ["uint8", "int32"], + ["uint16", "int32"], + ["int8", "sint32"], + ["int16", "sint32"], + ["float32", "float"], + ["float64", "double"], +]); + +function rosTypenameToProtoPath(typeName: string): string { + return `ros.${typeName.replace("/", ".")}`; +} + +// convert a combined ros string message definition to protobuf Root instance +function rosMsgDefinitionToProto(typeName: string, msgDef: string): protobufjs.Root { + const definitionArr = parseMessageDefinition(msgDef); + const root = new protobufjs.Root(); + + const BuiltinSrcParse = protobufjs.parse(builtinSrc, { keepCase: true }); + root.add(BuiltinSrcParse.root); + + for (const def of definitionArr) { + const rosDatatypeName = def.name ?? 
typeName;
+    const nameParts = rosDatatypeName.split("/");
+    if (nameParts.length !== 2) {
+      throw new Error(`Invalid name ${typeName}`);
+    }
+    const packageName = nameParts[0]!;
+    const msgName = nameParts[1]!;
+
+    const fields: string[] = [];
+    let fieldNumber = 1;
+    for (const field of def.definitions) {
+      if (field.isConstant === true) {
+        // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
+        fields.push(`// ${field.type} ${field.name} = ${field.valueText ?? field.value ?? ""}`);
+        continue;
+      }
+      const lineComments: string[] = [];
+      const qualifiers = [];
+      if (field.type === "uint8" && field.isArray === true) {
+        qualifiers.push("bytes");
+      } else {
+        if (field.isArray === true) {
+          qualifiers.push("repeated");
+        }
+        if (field.isComplex === true) {
+          qualifiers.push(rosTypenameToProtoPath(field.type));
+        } else if (BUILTIN_TYPE_MAP.has(field.type)) {
+          const protoType = BUILTIN_TYPE_MAP.get(field.type)!;
+          if (protoType.includes("int")) {
+            lineComments.push(`originally ${field.type}`);
+          }
+          qualifiers.push(BUILTIN_TYPE_MAP.get(field.type)!);
+        } else {
+          qualifiers.push(field.type);
+        }
+      }
+      if (field.arrayLength != undefined) {
+        lineComments.push(`length ${field.arrayLength}`);
+      }
+      fields.push(
+        `${qualifiers.join(" ")} ${field.name} = ${fieldNumber++};${
+          lineComments.length > 0 ? " // " + lineComments.join(", ") : ""
+        }`,
+      );
+    }
+
+    const outputSections = [
+      `// Generated from ${rosDatatypeName}`,
+
+      'syntax = "proto3";',
+
+      `package ros.${packageName};`,
+
+      `message ${msgName} {\n  ${fields.join("\n  ")}\n}`,
+    ];
+
+    const protoSrc = outputSections.filter(Boolean).join("\n\n") + "\n";
+
+    const ProtoSrcParse = protobufjs.parse(protoSrc, { keepCase: true });
+    root.add(ProtoSrcParse.root);
+  }
+
+  return root;
+}
+
+type TopicDetail = {
+  channelInfo: ChannelInfo;
+  MsgRoot: protobufjs.Type;
+};
+
+async function convert(filePath: string) {
+  await decompressLZ4.isLoaded;
+
+  const bag = new Bag(new FileReader(filePath));
+  await bag.open();
+
+  const mcapFilePath = filePath.replace(".bag", ".mcap");
+  console.debug(`Writing to ${mcapFilePath}`);
+
+  const mcapFile = new McapWriter();
+  await mcapFile.open(mcapFilePath);
+
+  const topicToDetailMap = new Map<string, TopicDetail>();
+
+  for (const [, connection] of bag.connections) {
+    if (!connection.type) {
+      continue;
+    }
+
+    const schemaName = rosTypenameToProtoPath(connection.type);
+
+    const root = rosMsgDefinitionToProto(connection.type, connection.messageDefinition);
+    const MsgRoot = root.lookupType(schemaName);
+
+    // create a descriptor message for the root
+    // Strip leading `.` from the package names to make them relative to the descriptor
+    const descriptorMsg = root.toDescriptor("proto3");
+    for (const desc of descriptorMsg.file) {
+      desc.package = desc.package?.substring(1);
+    }
+
+    const descriptorMsgEncoded = descriptor.FileDescriptorSet.encode(descriptorMsg).finish();
+
+    const channelInfo: ChannelInfo = {
+      type: "ChannelInfo",
+      id: topicToDetailMap.size,
+      topic: connection.topic,
+      encoding: "protobuf",
+      schemaName,
+      schema: protobufjs.util.base64.encode(
+        descriptorMsgEncoded,
+        0,
+        descriptorMsgEncoded.byteLength,
+      ),
+      data: new ArrayBuffer(0),
+    };
+
+    topicToDetailMap.set(connection.topic, {
+      channelInfo,
+      MsgRoot,
+    });
+    await mcapFile.write(channelInfo);
+  }
+
+  const mcapMessages: Array<Message> = [];
+  await bag.readMessages(
+    {
+      decompress: {
+        lz4: (buffer: Uint8Array, size: number) => decompressLZ4(buffer, size),
+      },
+    },
+    (result) => {
+      const detail =
topicToDetailMap.get(result.topic);
+      if (!detail) {
+        return;
+      }
+
+      const { channelInfo, MsgRoot } = detail;
+
+      const protoMsg = MsgRoot.fromObject(result.message as Record<string, unknown>);
+      const protoMsgBuffer = MsgRoot.encode(protoMsg).finish();
+
+      const timestamp = BigInt(result.timestamp.sec) * 1000000000n + BigInt(result.timestamp.nsec);
+      const msg: Message = {
+        type: "Message",
+        channelInfo,
+        timestamp,
+        data: protoMsgBuffer,
+      };
+
+      mcapMessages.push(msg);
+    },
+  );
+
+  for (const msg of mcapMessages) {
+    await mcapFile.write(msg);
+  }
+
+  await mcapFile.end();
+}
+
+program
+  .argument("<files...>", "path to .bag file(s)")
+  .description("Convert a ROS1 .bag file to an mcap file with protobuf messages")
+  .action(async (files: string[]) => {
+    for (const file of files) {
+      await convert(file).catch(console.error);
+    }
+  })
+  .parse();
diff --git a/typescript/src/McapWriter.ts b/typescript/src/McapWriter.ts
new file mode 100644
index 0000000000..c201bb17a4
--- /dev/null
+++ b/typescript/src/McapWriter.ts
@@ -0,0 +1,151 @@
+import { open, FileHandle } from "fs/promises";
+
+import { MCAP_MAGIC, RecordType } from "./constants";
+import { ChannelInfo, McapRecord, Message } from "./types";
+
+const LITTLE_ENDIAN = true;
+
+class Writer {
+  buffer: ArrayBuffer;
+  private view: DataView;
+  private offset = 0;
+  private textEncoder = new TextEncoder();
+
+  constructor(scratchBuffer?: ArrayBuffer) {
+    this.buffer = scratchBuffer ?? new ArrayBuffer(4096);
+    this.view = new DataView(this.buffer);
+  }
+
+  size(): number {
+    return this.offset;
+  }
+
+  ensureCapacity(capacity: number): void {
+    if (this.offset + capacity >= this.buffer.byteLength) {
+      const newBuffer = new ArrayBuffer(Math.max(this.buffer.byteLength * 2, this.offset + capacity + 1));
+      new Uint8Array(newBuffer).set(new Uint8Array(this.buffer));
+      this.buffer = newBuffer;
+      this.view = new DataView(newBuffer);
+    }
+  }
+  int8(value: number): void {
+    this.ensureCapacity(1);
+    this.view.setInt8(this.offset, value);
+    this.offset += 1;
+  }
+  uint8(value: number): void {
+    this.ensureCapacity(1);
+    this.view.setUint8(this.offset, value);
+    this.offset += 1;
+  }
+  int16(value: number): void {
+    this.ensureCapacity(2);
+    this.view.setInt16(this.offset, value, LITTLE_ENDIAN);
+    this.offset += 2;
+  }
+  uint16(value: number): void {
+    this.ensureCapacity(2);
+    this.view.setUint16(this.offset, value, LITTLE_ENDIAN);
+    this.offset += 2;
+  }
+  int32(value: number): void {
+    this.ensureCapacity(4);
+    this.view.setInt32(this.offset, value, LITTLE_ENDIAN);
+    this.offset += 4;
+  }
+  uint32(value: number): void {
+    this.ensureCapacity(4);
+    this.view.setUint32(this.offset, value, LITTLE_ENDIAN);
+    this.offset += 4;
+  }
+  int64(value: bigint): void {
+    this.ensureCapacity(8);
+    this.view.setBigInt64(this.offset, value, LITTLE_ENDIAN);
+    this.offset += 8;
+  }
+  uint64(value: bigint): void {
+    this.ensureCapacity(8);
+    this.view.setBigUint64(this.offset, value, LITTLE_ENDIAN);
+    this.offset += 8;
+  }
+  string(value: string): void {
+    const stringBytes = this.textEncoder.encode(value);
+    this.uint32(stringBytes.byteLength);
+    this.ensureCapacity(stringBytes.byteLength);
+    new Uint8Array(this.buffer, this.offset, stringBytes.byteLength).set(stringBytes);
+    this.offset += stringBytes.length;
+  }
+
+  toUint8(): Uint8Array {
+    return new Uint8Array(this.buffer, 0, this.size());
+  }
+}
+
+export default class McapWriter {
+  private writeStream?: FileHandle;
+
+  async open(pathname: string): Promise<void> {
+    this.writeStream = await open(pathname, "w");
+
+    // write the magic
+    // 0x89, M, C, A, P, \r, \n, \n
+    await
this.writeStream.write(new Uint8Array(MCAP_MAGIC));
+
+    // write the format version
+    await this.writeStream.write(new Uint8Array([1]));
+  }
+
+  async write(record: McapRecord): Promise<void> {
+    switch (record.type) {
+      case "ChannelInfo":
+        await this.writeChannelInfoRecord(record);
+        break;
+      case "Message":
+        await this.writeMessageRecord(record);
+        break;
+      default:
+        throw new Error(`Unsupported record type: ${record.type}`);
+    }
+  }
+
+  async end(): Promise<void> {
+    // write the footer
+    const serializer = new Writer();
+    serializer.uint8(RecordType.FOOTER);
+    serializer.uint64(0n);
+    serializer.uint32(0);
+    await this.writeStream?.write(serializer.toUint8());
+
+    await this.writeStream?.close();
+  }
+
+  private async writeChannelInfoRecord(info: ChannelInfo): Promise<void> {
+    const serializer = new Writer();
+    serializer.uint32(info.id);
+    serializer.string(info.topic);
+    serializer.string(info.encoding);
+    serializer.string(info.schemaName);
+    serializer.string(info.schema);
+
+    const preamble = new Writer();
+    preamble.uint8(RecordType.CHANNEL_INFO);
+    preamble.uint32(serializer.size());
+
+    await this.writeStream?.write(preamble.toUint8());
+    await this.writeStream?.write(serializer.toUint8());
+  }
+
+  private async writeMessageRecord(message: Message): Promise<void> {
+    const serializer = new Writer();
+    serializer.uint32(message.channelInfo.id);
+    serializer.uint64(message.timestamp);
+
+    const preamble = new Writer();
+    preamble.uint8(RecordType.MESSAGE);
+    preamble.uint32(serializer.size() + message.data.byteLength);
+
+    await this.writeStream?.write(preamble.toUint8());
+    await this.writeStream?.write(serializer.toUint8());
+    await this.writeStream?.write(new Uint8Array(message.data));
+  }
+}
diff --git a/typescript/src/index.ts b/typescript/src/index.ts
index 4e02da5ef1..94e6da95a4 100644
--- a/typescript/src/index.ts
+++ b/typescript/src/index.ts
@@ -1,3 +1,4 @@
 export { default as McapReader } from "./McapReader";
 export * from "./parse";
 export * from "./types";
+export { default as McapWriter } from "./McapWriter";
diff --git a/typescript/typings/protobufjs.d.ts b/typescript/typings/protobufjs.d.ts
new file mode 100644
index 0000000000..426f994394
--- /dev/null
+++ b/typescript/typings/protobufjs.d.ts
@@ -0,0 +1,11 @@
+import protobufjs from "protobufjs";
+import descriptor from "protobufjs/ext/descriptor";
+
+// https://github.com/protobufjs/protobuf.js/issues/1499
+declare module "protobufjs" {
+  interface ReflectionObject {
+    toDescriptor(
+      protoVersion: string,
+    ): protobufjs.Message<descriptor.IFileDescriptorSet> & descriptor.IFileDescriptorSet;
+  }
+}
diff --git a/yarn.lock b/yarn.lock
index 245d1c4d95..b96d7d827d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -351,7 +351,17 @@
   resolved "https://registry.yarnpkg.com/@foxglove/eslint-plugin/-/eslint-plugin-0.17.1.tgz#f29fbe7bf538f2721535a9c06ec6ec607ad72f71"
   integrity sha512-/Z32WFj7bBfEIdCh0aXTc5mQbVUcm1KvG586FxCq8k0r0oBUml0bAtLIem9IjE7sUWtabSDJWTFLJpHJhlP+XA==

-"@foxglove/rosmsg-serialization@1.2.3":
+"@foxglove/rosbag@0.1.2":
+  version "0.1.2"
+  resolved "https://registry.yarnpkg.com/@foxglove/rosbag/-/rosbag-0.1.2.tgz#eba82bb9d8737c727555acd9ec17079058fb108e"
+  integrity sha512-BFI+zzSh28uTtglWa0BfhGJGlEkjiGZYFaVASS0OXlE+9XaU1WkPk0GEdQAeGUontYVd/M4z5nuVi3+XAVahfg==
+  dependencies:
+    "@foxglove/rosmsg" "^2.0.0 || ^3.0.0"
+    "@foxglove/rosmsg-serialization" "^1.2.3"
+    "@foxglove/rostime" "^1.1.0"
+    heap "^0.2.6"
+
+"@foxglove/rosmsg-serialization@1.2.3", "@foxglove/rosmsg-serialization@^1.2.3":
   version "1.2.3"
   resolved
"https://registry.yarnpkg.com/@foxglove/rosmsg-serialization/-/rosmsg-serialization-1.2.3.tgz#742adc7a322357b0a19aaebb4fe7c2c557f55e92" integrity sha512-wXYqEtcJAXjJEKxxB18tx/3bbI2peQdbqzIWYnCCf9VwVDLk57O/fGqHLJnH5hUH4I37khE/Q0xU/JXSUrp46A== @@ -604,6 +614,59 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" + integrity sha1-m4sMxmPWaafY9vXQiToU00jzD78= + +"@protobufjs/base64@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735" + integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== + +"@protobufjs/codegen@^2.0.4": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb" + integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== + +"@protobufjs/eventemitter@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70" + integrity sha1-NVy8mLr61ZePntCV85diHx0Ga3A= + +"@protobufjs/fetch@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45" + integrity sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU= + dependencies: + "@protobufjs/aspromise" "^1.1.1" + "@protobufjs/inquire" "^1.1.0" + +"@protobufjs/float@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1" + integrity sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E= + +"@protobufjs/inquire@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089" + integrity sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik= + +"@protobufjs/path@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d" + integrity sha1-bMKyDFya1q0NzP0hynZz2Nf79o0= + +"@protobufjs/pool@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54" + integrity sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q= + +"@protobufjs/utf8@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" + integrity sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA= + "@sinonjs/commons@^1.7.0": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" @@ -725,11 +788,21 @@ resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.176.tgz#641150fc1cda36fbfa329de603bbb175d7ee20c0" integrity sha512-xZmuPTa3rlZoIbtDUyJKZQimJV3bxCmzMIO2c9Pz9afyDro6kr7R79GwcB6mRhuoPmV2p1Vb66WOJH7F886WKQ== +"@types/long@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9" + integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w== + "@types/node@*", "@types/node@16.11.9": version "16.11.9" resolved 
"https://registry.yarnpkg.com/@types/node/-/node-16.11.9.tgz#879be3ad7af29f4c1a5c433421bf99fab7047185" integrity sha512-MKmdASMf3LtPzwLyRrFjtFFZ48cMf8jmX5VRYrDQiJa8Ybu5VAmkqBWqKU8fdCwD8ysw4mQ9nrEHvzg6gunR7A== +"@types/node@>=13.7.0": + version "16.11.11" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.11.tgz#6ea7342dfb379ea1210835bada87b3c512120234" + integrity sha512-KB0sixD67CeecHC33MYn+eYARkqTheIRNuu97y2XMjR7Wu3XibO1vaY6VBV6O/a89SPI81cEUIYT87UqUWlZNw== + "@types/prettier@^2.1.5": version "2.4.2" resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.4.2.tgz#4c62fae93eb479660c3bd93f9d24d561597a8281" @@ -1912,6 +1985,11 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +heap@^0.2.6: + version "0.2.6" + resolved "https://registry.yarnpkg.com/heap/-/heap-0.2.6.tgz#087e1f10b046932fc8594dd9e6d378afc9d1e5ac" + integrity sha1-CH4fELBGky/IWU3Z5tN4r8nR5aw= + html-encoding-sniffer@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" @@ -2756,6 +2834,11 @@ lodash@4.17.21, lodash@^4.7.0: resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== +long@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" + integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -3109,6 +3192,25 @@ prompts@^2.0.1: kleur "^3.0.3" sisteransi "^1.0.5" +protobufjs@6.11.2: + version "6.11.2" + resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.2.tgz#de39fabd4ed32beaa08e9bb1e30d08544c1edf8b" + integrity sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/long" "^4.0.1" + "@types/node" ">=13.7.0" + long "^4.0.0" + psl@^1.1.33: version "1.8.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" From b04aef70794bdce6648ed45f7d4f9f9a62abed28 Mon Sep 17 00:00:00 2001 From: Roman Shtylman Date: Thu, 16 Dec 2021 19:17:25 -0800 Subject: [PATCH 007/635] Enhancements/fixes to bag2proto (#5) - Add support for bz2 compressed bags - Add support for Float64Array handling --- typescript/package.json | 1 + typescript/scripts/bag2proto.ts | 59 ++++++++++++++++++++++++--------- yarn.lock | 5 +++ 3 files changed, 49 insertions(+), 16 deletions(-) diff --git a/typescript/package.json b/typescript/package.json index 596dfa38dc..3ff82dc736 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -34,6 +34,7 @@ "@foxglove/rosmsg-serialization": "1.2.3", "@foxglove/rosmsg2-serialization": "1.0.4", "@foxglove/tsconfig": "1.1.0", + "@foxglove/wasm-bz2": "^0.0.4", "@types/jest": "27.0.3", "@types/lodash": "4.14.176", "@types/node": "16.11.9", diff --git a/typescript/scripts/bag2proto.ts b/typescript/scripts/bag2proto.ts 
index ce7d4327f9..c440b61ee5 100644
--- a/typescript/scripts/bag2proto.ts
+++ b/typescript/scripts/bag2proto.ts
@@ -7,6 +7,7 @@ import { Bag } from "@foxglove/rosbag";
 import { FileReader } from "@foxglove/rosbag/node";
 import { parse as parseMessageDefinition } from "@foxglove/rosmsg";
+import Bzip2 from "@foxglove/wasm-bz2";
 import { program } from "commander";
 import protobufjs from "protobufjs";
 import descriptor from "protobufjs/ext/descriptor";
@@ -35,8 +36,8 @@ const BUILTIN_TYPE_MAP = new Map([
   ["duration", "ros.Duration"],
   ["uint8", "int32"],
   ["uint16", "int32"],
-  ["int8", "sint32"],
-  ["int16", "sint32"],
+  ["int8", "int32"],
+  ["int16", "int32"],
   ["float32", "float"],
   ["float64", "double"],
 ]);
@@ -72,7 +73,7 @@ function rosMsgDefinitionToProto(typeName: string, msgDef: string): protobufjs.R
     }
     const lineComments: string[] = [];
     const qualifiers = [];
-    if (field.type === "uint8" && field.isArray === true) {
+    if (field.isArray === true && (field.type === "uint8" || field.type === "int8")) {
       qualifiers.push("bytes");
     } else {
       if (field.isArray === true) {
@@ -124,8 +125,26 @@ type TopicDetail = {
   MsgRoot: protobufjs.Type;
 };
 
+// Protobuf fromObject doesn't like being given Float64Arrays
+// We need to recursively convert all Float64Arrays into regular arrays
+function convertTypedArrays(msg: Record<string, unknown>): Record<string, unknown> {
+  for (const [key, value] of Object.entries(msg)) {
+    if (value == undefined) {
+      continue;
+    }
+    if (value instanceof Float64Array) {
+      msg[key] = Array.from(value);
+    } else if (typeof value === "object") {
+      msg[key] = convertTypedArrays(value as Record<string, unknown>);
+    }
+  }
+
+  return msg;
+}
+
 async function convert(filePath: string) {
   await decompressLZ4.isLoaded;
+  const bzip2 = await Bzip2.init();
 
   const bag = new Bag(new FileReader(filePath));
   await bag.open();
@@ -183,6 +202,7 @@ async function convert(filePath: string) {
     {
       decompress: {
         lz4: (buffer: Uint8Array, size: number) => decompressLZ4(buffer, size),
+        bz2: (buffer: Uint8Array, size: number) => bzip2.decompress(buffer, size, { small: false }),
       },
     },
     (result) => {
@@ -192,19 +212,26 @@ async function convert(filePath: string) {
       }
 
       const { channelInfo, MsgRoot } = detail;
-
-      const protoMsg = MsgRoot.fromObject(result.message as Record<string, unknown>);
-      const protoMsgBuffer = MsgRoot.encode(protoMsg).finish();
-
-      const timestamp = BigInt(result.timestamp.sec) * 1000000000n + BigInt(result.timestamp.nsec);
-      const msg: Message = {
-        type: "Message",
-        channelInfo,
-        timestamp,
-        data: protoMsgBuffer,
-      };
-
-      mcapMessages.push(msg);
+      try {
+        const rosMsg = convertTypedArrays(result.message as Record<string, unknown>);
+        const protoMsg = MsgRoot.fromObject(rosMsg);
+        const protoMsgBuffer = MsgRoot.encode(protoMsg).finish();
+
+        const timestamp =
+          BigInt(result.timestamp.sec) * 1000000000n + BigInt(result.timestamp.nsec);
+        const msg: Message = {
+          type: "Message",
+          channelInfo,
+          timestamp,
+          data: protoMsgBuffer,
+        };
+
+        mcapMessages.push(msg);
+      } catch (err) {
+        console.error(err);
+        console.log(result.message);
+        throw err;
+      }
     },
   );
 
diff --git a/yarn.lock b/yarn.lock
index b96d7d827d..454c519821 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -394,6 +394,11 @@
   resolved "https://registry.yarnpkg.com/@foxglove/tsconfig/-/tsconfig-1.1.0.tgz#48c37fffd6f349c3ee08a60fc62ccf636f3b59a6"
   integrity sha512-qZU4MtXVgPhDBFazSEx7yDEuEg8cPHXFQVhBaUABZkCBdcnEE9sxlgEt0gSikF4fRtY6COGIJPVRflnPJXjJKA==
 
+"@foxglove/wasm-bz2@^0.0.4":
+  version "0.0.4"
+  resolved "https://registry.yarnpkg.com/@foxglove/wasm-bz2/-/wasm-bz2-0.0.4.tgz#f34bb8ee434995e133a06e070d0e8067d2f0ea0f"
+ integrity sha512-/JLg0pVqpL2UwwObsxcIyFHD5imn4sthE7FHIThHCfI9+93ezCfKmTNo1gdn9xxWNobfcpDcv/3jJrr/RhbaNg== + "@humanwhocodes/config-array@^0.5.0": version "0.5.0" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9" From d332e0c795b485d3e8defd57d115a30fee4307a8 Mon Sep 17 00:00:00 2001 From: Adrian Macneil Date: Tue, 4 Jan 2022 20:54:54 -0800 Subject: [PATCH 008/635] License (#6) --- LICENSE | 202 ++++++++++++++++++++++++++++++++++++++++ LICENSE.md | 21 ----- README.md | 6 ++ typescript/package.json | 2 +- 4 files changed, 209 insertions(+), 22 deletions(-) create mode 100644 LICENSE delete mode 100644 LICENSE.md diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/LICENSE.md b/LICENSE.md deleted file mode 100644 index 0aa593ea8c..0000000000 --- a/LICENSE.md +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) Foxglove Technologies Inc - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/README.md b/README.md index 908f73e777..4c9705eb44 100644 --- a/README.md +++ b/README.md @@ -18,3 +18,9 @@ Read and validate a mcap file: ``` yarn workspace @foxglove/mcap validate file.mcap ``` + +## License + +Licensed under the [Apache License, Version 2.0](/LICENSE). + +Contributors are required to accept the [Contributor License Agreement](https://github.com/foxglove/cla). This ensures clarity around licensing of your contributions, and will allow the project to be donated to community governance in the future. 
diff --git a/typescript/package.json b/typescript/package.json index 3ff82dc736..2acbf73846 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -2,7 +2,7 @@ "name": "@foxglove/mcap", "version": "0.1.0", "description": "Message Capture file support in TypeScript", - "license": "MIT", + "license": "Apache-2.0", "repository": { "type": "git", "url": "https://github.com/foxglove/mcap.git" From 081470328f98adbd36dd9b6f7a6455430e37cf74 Mon Sep 17 00:00:00 2001 From: Adrian Macneil Date: Tue, 4 Jan 2022 20:56:25 -0800 Subject: [PATCH 009/635] Message Capture -> MCAP (#7) --- README.md | 2 +- typescript/package.json | 2 +- typescript/src/McapReader.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 4c9705eb44..f2b3085737 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Message Capture file format +# MCAP file format Working specification: https://docs.google.com/document/d/1NaC2v0Qlx43661XkrlVncybYiuloDFRtwdyDKlPpS-A/edit diff --git a/typescript/package.json b/typescript/package.json index 2acbf73846..e623c03a86 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -1,7 +1,7 @@ { "name": "@foxglove/mcap", "version": "0.1.0", - "description": "Message Capture file support in TypeScript", + "description": "MCAP file support in TypeScript", "license": "Apache-2.0", "repository": { "type": "git", diff --git a/typescript/src/McapReader.ts b/typescript/src/McapReader.ts index 4c946f1275..1d84552f2a 100644 --- a/typescript/src/McapReader.ts +++ b/typescript/src/McapReader.ts @@ -27,7 +27,7 @@ type McapReaderOptions = { }; /** - * A streaming reader for Message Capture files. + * A streaming reader for MCAP files. * * Usage example: * ``` From 1164dad8710870f2b421c22d001727237bfcdec6 Mon Sep 17 00:00:00 2001 From: Wyatt Alt Date: Mon, 10 Jan 2022 06:44:16 -0800 Subject: [PATCH 010/635] Add MCAP specification (#8) Adds the initial draft of the MCAP specification. 
--- .gitattributes | 1 + .github/workflows/ci.yml | 1 + README.md | 2 +- docs/specification/README.md | 277 ++++++++++++++++++ .../supported-compression-formats.md | 11 + docs/specification/diagrams/chunked.png | 3 + docs/specification/diagrams/unchunked.png | 3 + docs/specification/notes/explanatory-notes.md | 127 ++++++++ docs/specification/profiles/README.md | 22 ++ docs/specification/profiles/ros1.md | 8 + docs/specification/profiles/ros2.md | 7 + package.json | 3 + yarn.lock | 5 + 13 files changed, 469 insertions(+), 1 deletion(-) create mode 100644 .gitattributes create mode 100644 docs/specification/README.md create mode 100644 docs/specification/compression/supported-compression-formats.md create mode 100644 docs/specification/diagrams/chunked.png create mode 100644 docs/specification/diagrams/unchunked.png create mode 100644 docs/specification/notes/explanatory-notes.md create mode 100644 docs/specification/profiles/README.md create mode 100644 docs/specification/profiles/ros1.md create mode 100644 docs/specification/profiles/ros2.md diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..24a8e87939 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +*.png filter=lfs diff=lfs merge=lfs -text diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8b7be03bc5..b3f441d4e4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,5 +17,6 @@ jobs: cache: yarn - run: yarn install --frozen-lockfile + - run: yarn run prettier docs/* --check - run: yarn workspace @foxglove/mcap lint:ci - run: yarn workspace @foxglove/mcap test diff --git a/README.md b/README.md index f2b3085737..7a2d91e9b4 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # MCAP file format -Working specification: https://docs.google.com/document/d/1NaC2v0Qlx43661XkrlVncybYiuloDFRtwdyDKlPpS-A/edit +* [Draft specification](./docs/specification) ## Developer quickstart diff --git a/docs/specification/README.md b/docs/specification/README.md new file mode 100644 index 0000000000..8f6657c35d --- /dev/null +++ b/docs/specification/README.md @@ -0,0 +1,277 @@ +# MCAP File Format Specification + +[tlv wiki]: https://en.wikipedia.org/wiki/Type-length-value +[profiles]: ./profiles +[compression formats]: ./compression/supported-compression-formats.md +[explanatory notes]: ./notes/explanatory-notes.md +[diagram unchunked]: ./diagrams/unchunked.png +[diagram chunked]: ./diagrams/chunked.png +[feature explanations]: ./notes/explanatory-notes.md#feature-explanations + +> Status: DRAFT + +## Overview + +MCAP is a container file format for append-only storage of +heterogeneously-schematized data. It is inspired by the ROS1 bag format and is +intended to support flexible serialization options, while also generalizing to +non-ROS systems and retaining characteristics such as self-containment and +chunk compression. Features include: + +- Single-pass, indexed writes (no backward seeking) +- Flexible message serialization options (e.g. ros1, protobuf, …) +- Self-contained (message schemas are included in the file) +- Fast remote file summarization +- File attachments +- Optional chunk compression +- Optional CRC integrity checks + +### Glossary + +Some helpful terms to understand in the following sections are: + +- **Record**: A [TLV triplet][tlv wiki] with type and value corresponding to one + of the opcodes and schemas below. +- **Topic**: A named message type and associated schema. +- **Channel**: A logical stream that contains messages on a single topic. 
+  Channels are associated with a numeric ID by the recorder - the **Channel ID**.
+- **Channel Info**: A type of record describing information about a channel,
+  notably containing the name and schema of the topic.
+- **Message**: A type of record representing a timestamped message on a channel
+  (and therefore associated with a topic/schema). A message can be parsed by a
+  reader that has also read the channel info for the channel on which the
+  message appears.
+- **Chunk**: A record type that wraps a compressed set of channel info and
+  message records.
+- **Attachment**: Extra data that may be included in the file, outside the
+  chunks. Attachments may be quickly listed and accessed via an index at the
+  end of the file.
+- **Index**: The format contains indexes for both messages and attachments. For
+  messages, there are two levels of indexing - a **Chunk Index** at the end of
+  the file points to chunks by offset, enabling fast location of chunks based
+  on topic and time range. A second index - the **Message Index** - after each
+  chunk contains, for each channel in the chunk, an offset and timestamp for
+  every message to allow fast location of messages within the decompressed
+  chunk data.
+
+  The attachment index at the end of the file allows for fast listing and
+  location of attachments based on name, timestamp, or attachment type.
+
+## Format Description
+
+An MCAP file is physically structured as a series of concatenated, type- and
+length-prefixed **"records"**, capped on each end with magic bytes:
+
+    <magic> [records...] <magic>
+
+These are the magic bytes:
+
+    0x89, M, C, A, P, 0x30, \r, \n
+
+> Note: The version byte (ASCII zero 0x30) following "MCAP" will be updated to
+> 1 (0x31) upon ratification of this specification. Until then, backward
+> compatibility is not guaranteed.
+
+MCAP files may be **"chunked"** or **"unchunked"**. Chunked and unchunked files
+have different constraints on the layout of record types in the file. In
+chunked files, messages are grouped into optionally-compressed blocks of data
+before being written to disk. In an unchunked file, each message is written out
+uncompressed. See the diagrams below for clarity (the record types shown are
+described in the following section):
+
+#### Chunked
+
+![Chunked][diagram chunked]
+
+#### Unchunked
+
+![Unchunked][diagram unchunked]
+
+Benefits of chunked files include:
+
+- Support for random access via time- and topic-based indexing.
+- Reduced storage requirements when recording or processing data.
+- Reduced bandwidth requirements when transferring over a network.
+- Possibly higher write performance if the cost of IO outweighs the cost of
+  compression.
+
+Benefits of unchunked files include:
+
+- Higher write performance on CPU-constrained systems.
+- Less potential for data loss in case of a recording crash. No
+  "to-be-compressed" buffer is dropped by the recorder -- though the protocol
+  makes no specification on how the process syncs unchunked messages to disk.
+
+Unchunked files are less friendly to readers than chunked files due to their
+lack of an index and greater size. When unchunked files are in use, they may be
+converted to chunked files in post-processing to mitigate this.
+
+### Record Types
+
+Record types are identified by single-byte **opcodes**. Record opcodes in the
+range 0x01-0x7F are reserved for future MCAP format usage. 0x80-0xFF are
+reserved for application extensions and user proposals.
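+
+> Note: The following is a non-normative TypeScript sketch of how a reader
+> might validate the magic bytes and walk the top-level record stream. It
+> assumes each record is framed as a 1-byte opcode followed by a uint32
+> little-endian length, matching the Writer preamble in the McapWriter.ts
+> patch above; the names used here are illustrative only.
+
+```
+const MAGIC = [0x89, 0x4d, 0x43, 0x41, 0x50, 0x30, 0x0d, 0x0a]; // 0x89, M, C, A, P, 0x30, \r, \n
+
+function hasMagic(view: DataView, offset: number): boolean {
+  return MAGIC.every((byte, i) => view.getUint8(offset + i) === byte);
+}
+
+// Yield each top-level record as an opcode plus a view of its body.
+function* topLevelRecords(buffer: ArrayBuffer): Generator<{ opcode: number; body: DataView }> {
+  const view = new DataView(buffer);
+  if (!hasMagic(view, 0) || !hasMagic(view, buffer.byteLength - MAGIC.length)) {
+    throw new Error("missing MCAP magic bytes");
+  }
+  let offset = MAGIC.length;
+  const end = buffer.byteLength - MAGIC.length;
+  while (offset + 5 <= end) {
+    const opcode = view.getUint8(offset);
+    const recordLength = view.getUint32(offset + 1, true); // little-endian
+    yield { opcode, body: new DataView(buffer, offset + 5, recordLength) };
+    offset += 5 + recordLength;
+  }
+}
+```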
+
+##### Serialization and Notation
+
+The section below uses the following data types and serialization choices. In
+all cases integers are serialized little endian:
+
+- **Timestamp**: uint64 nanoseconds since a user-understood epoch (e.g. unix
+  epoch, robot boot time, etc.)
+- **String**: a uint32-prefixed UTF8 string
+- **KeyValues<T1, T2>**: A uint32 length-prefixed association of key-value
+  pairs, serialized as shown below
+- **Bytes**: refers to an array of bytes, without a length prefix. If a length
+  prefix is required a designation like "uint32 length-prefixed bytes" will be
+  used.
+
+```
+<uint32 length> <key1> <value1> <key2> <value2> ...
+```
+
+An empty KeyValues consists of a zero-value length prefix.
+
+#### Header (op=0x01)
+
+The first record in every mcap file is a header.
+
+| Bytes | Name     | Type                      | Description |
+| ----- | -------- | ------------------------- | ----------- |
+| 4+n   | profile  | String                    | The profile to use for interpretation of channel info user data. If the value matches one of the [supported profiles][profiles], the channel info user data section should be structured to match the description in the corresponding profile. This field may also be supplied empty, or contain a profile that is not one of those recognized. |
+| N     | metadata | KeyValues<string, string> | Example keys: robot_id, git_sha, timezone, run_id. |
+
+#### Footer (op=0x02)
+
+| Bytes | Name         | Type   | Description |
+| ----- | ------------ | ------ | ----------- |
+| 8     | index_offset | uint64 | Pointer to start of index section. If there are no records in the index section, this should be zero. |
+| 4     | index_crc    | uint32 | CRC of all data from index_offset through the byte immediately preceding this CRC. Optionally zero. |
+
+A file without a footer is **corrupt**, indicating the writer process encountered
+an unclean shutdown. It may be possible to recover data from a corrupt file.
+
+#### Channel Info (op=0x03)
+
+Identifies a stream of messages on a particular topic and includes information
+about how the messages should be decoded by readers. A channel info record must
+occur in the file prior to any message that references its Channel ID. Channel
+IDs must uniquely identify a channel across the entire file.
+
+| Bytes | Name        | Type                         | Description | Example |
+| ----- | ----------- | ---------------------------- | ----------- | ------- |
+| 2     | channel_id  | uint16                       | Channel ID  | 1 |
+| 4 + N | topic_name  | String                       | Topic       | /diagnostics |
+| 4 + N | encoding    | String                       | Message Encoding | cdr, cbor, ros1, protobuf, etc. |
+| 4 + N | schema_name | String                       | Schema Name | std_msgs/Header |
+| 4+N   | schema      | uint32 length-prefixed bytes | Schema      | |
+| N     | user_data   | KeyValues<string, string>    | Metadata about this channel | used to encode protocol-specific details like callerid, latching, QoS profiles... Refer to [supported profiles][profiles]. |
+| 4     | crc         | uint32                       | CRC checksum of preceding fields in the record. If advantageous for performance, zero may be recorded. Readers will need to skip checksum validation to parse such a file. | |
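+
+> Note: As a non-normative illustration of the String and KeyValues encodings
+> above, here is a small TypeScript sketch. Whether the uint32 prefix counts
+> pairs or bytes is not pinned down by this draft; the sketch assumes it counts
+> pairs, so an empty KeyValues serializes to a single zero uint32.
+
+```
+const textEncoder = new TextEncoder();
+
+// String: a uint32 byte-length prefix (little-endian) followed by UTF8 data.
+function serializeString(value: string): Uint8Array {
+  const utf8 = textEncoder.encode(value);
+  const result = new Uint8Array(4 + utf8.byteLength);
+  new DataView(result.buffer).setUint32(0, utf8.byteLength, true);
+  result.set(utf8, 4);
+  return result;
+}
+
+// KeyValues<string, string>: a uint32 prefix followed by alternating keys and values.
+function serializeKeyValues(entries: Map<string, string>): Uint8Array {
+  const parts: Uint8Array[] = [];
+  for (const [key, value] of entries) {
+    parts.push(serializeString(key), serializeString(value));
+  }
+  const result = new Uint8Array(4 + parts.reduce((sum, part) => sum + part.byteLength, 0));
+  new DataView(result.buffer).setUint32(0, entries.size, true); // assumption: prefix counts pairs
+  let offset = 4;
+  for (const part of parts) {
+    result.set(part, offset);
+    offset += part.byteLength;
+  }
+  return result;
+}
+```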
|
+| 4 | crc | uint32 | CRC checksum of preceding fields in the record. If advantageous for performance, zero may be recorded. Readers will need to skip checksum validation to parse such a file. | |
+
+#### Message (op=0x04)
+
+A message record encodes a single timestamped message on a particular channel.
+Message records may occur inside a Chunk, or outside the chunk in the
+case of an unchunked file. A chunked file may not have messages outside the
+chunks.
+
+Message records must be preceded by a Channel Info record for the given channel
+ID. That Channel Info record may appear inside the same chunk as the message,
+or in an earlier chunk in the file. In an unchunked file, both the channel info
+and message records will be outside chunks, as there will be no chunks.
+
+| Bytes | Name | Type | Description |
+| ----- | ------------ | --------- | --------------------------------------------------------------------------------------------------------------- |
+| 2 | channel_id | uint16 | Channel ID |
+| 4 | sequence | uint32 | Optional message counter assigned by publisher. If not assigned by publisher, must be recorded by the recorder. |
+| 8 | publish_time | Timestamp | Time at which the message was published. If not available, must be set to the record time. |
+| 8 | record_time | Timestamp | Time at which the message was recorded by the recorder process. |
+| N | message_data | Bytes | Message data, to be decoded according to the schema of the channel. |
+
+#### Chunk (op=0x05)
+
+A Chunk is a collection of compressed channel info and message records.
+
+| Bytes | Name | Type | Description |
+| ----- | ----------------- | ------ | ------------------------------------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------- |
+| 8 | uncompressed_size | uint64 | Uncompressed size of the "records" section. |
+| 4 | uncompressed_crc | uint32 | CRC32 checksum of decompressed "records" section. May be set to zero if CRC validation isn't required. |
+| 4 + N | compression | String | compression algorithm | lz4, zstd, "". A zero-length string indicates no compression. Refer to [supported compression formats][compression formats]. |
+| N | records | Bytes | Concatenated records, compressed with the algorithm in the "compression" field. |
+
+#### Message Index (op=0x06)
+
+The Message Index record maps timestamps to message offsets. One message index
+record is written for each channel in the preceding chunk. All message index
+records for a chunk must immediately follow the chunk.
+
+| Bytes | Name | Type | Description |
+| ----- | ---------- | ---------------------------- | -------------------------------------------------------------------------------------------------------------- |
+| 2 | channel_id | uint16 | Channel ID. |
+| 4 | count | uint32 | Number of records in the chunk, on this channel. |
+| N | records | KeyValues<Timestamp, uint64> | Array of timestamp and offset for each record. Offset is relative to the start of the decompressed chunk data. |
+| 4 | crc | uint32 | CRC of preceding fields in the record. May be zeroed if not required. |
+
+#### Chunk Index (op=0x07)
+
+The Chunk Index records form a coarse index of timestamps to chunk offsets,
+along with the locations of the message index records associated with those
+chunks.
+
+| Bytes | Name | Type | Description |
+| ----- | --------------------- | ------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- |
+| 8 | start_time | Timestamp | First message record timestamp in the chunk. |
+| 8 | end_time | Timestamp | Last message record timestamp in the chunk. |
+| 8 | chunk_offset | uint64 | Offset to the chunk record from the start of the file. |
+| N | message_index_offsets | KeyValues<uint16, uint64> | Mapping from channel ID to the offset of the message index record for that channel after the chunk, from the start of the file. |
+| 8 | message_index_length | uint64 | Total length in bytes of the message index records after the chunk, including lengths and opcodes. |
+| 4 + N | compression | String | The compression used on this chunk. Refer to [supported compression formats][compression formats]. |
+| 8 | compressed_size | uint64 | The compressed size of the chunk. |
+| 8 | decompressed_size | uint64 | The decompressed size of the chunk. |
+| 4 | crc | uint32 | CRC of the preceding fields within the record. |
+
+#### Attachment (op=0x08)
+
+Attachments can be used to attach artifacts such as calibration data, text, or
+core dumps. Attachment records must not appear within a chunk.
+
+| Bytes | Name | Type | Description |
+| ----- | ------------ | ---------------------------- | ------------------------------------------------------- |
+| 4+N | name | String | Name of the attachment, e.g. "scene1.jpg". |
+| 8 | record_time | Timestamp | Time at which the attachment was recorded. |
+| 4+N | content_type | String | MIME Type (e.g. "text/plain"). |
+| 8+N | data | uint64 length-prefixed bytes | Attachment data. |
+| 4 | crc | uint32 | CRC of preceding fields in the record. Optionally zero. |
+
+#### Attachment Index (op=0x09)
+
+The attachment index is an index to named attachments within the file. One
+record is recorded per attachment in the file.
+
+| Bytes | Name | Type | Description |
+| ----- | --------------- | --------- | ---------------------------------------------------------- |
+| 8 | record_time | Timestamp | Timestamp at which the attachment was recorded. |
+| 8 | attachment_size | uint64 | Size of the attachment. |
+| 4 + N | name | String | Name of the attachment. |
+| 4 + N | content_type | String | MIME type of the attachment. |
+| 8 | offset | uint64 | Byte offset to the attachment, from the start of the file. |
+
+#### Statistics (op=0x0A)
+
+The statistics record contains statistics about the recorded data. It is the
+last record in the file before the footer.
+
+| Bytes | Name | Type | Description |
+| ----- | ---------------- | ------------------------- | -------------------------------------------------------------- |
+| 8 | message_count | uint64 | Number of messages in the file across all topics. |
+| 4 | channel_count | uint32 | Number of channels in the file across all topics. |
+| 4 | attachment_count | uint32 | Number of attachments in the file. |
+| 4 | chunk_count | uint32 | Number of chunks in the file. |
+| N | channel_stats | KeyValues<uint16, uint64> | Array of channel IDs and total message counts for the channel. |
+
+## Further Reading
+
+Useful links below:
+
+- [Feature explanations][feature explanations]: includes usage details that may
+  be useful to implementers of readers or writers.
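+
+As a further non-normative aid, the String and KeyValues primitives used
+throughout the tables above might be serialized as in this TypeScript sketch.
+The KeyValues byte layout shown (a uint32 byte-length prefix followed by
+concatenated key/value pairs) is an assumption of the sketch, since the
+serialization section above elides the exact pair layout:
+
+```typescript
+// Sketch: serialize the String and KeyValues<string, string> primitives.
+function serializeString(value: string): Uint8Array {
+  const utf8 = new TextEncoder().encode(value);
+  const out = new Uint8Array(4 + utf8.byteLength);
+  new DataView(out.buffer).setUint32(0, utf8.byteLength, true); // little endian
+  out.set(utf8, 4);
+  return out;
+}
+
+function serializeKeyValues(map: Map<string, string>): Uint8Array {
+  const pairs: Uint8Array[] = [];
+  for (const [key, value] of map) {
+    pairs.push(serializeString(key), serializeString(value));
+  }
+  const bodyLength = pairs.reduce((sum, part) => sum + part.byteLength, 0);
+  const out = new Uint8Array(4 + bodyLength);
+  new DataView(out.buffer).setUint32(0, bodyLength, true);
+  let offset = 4;
+  for (const part of pairs) {
+    out.set(part, offset);
+    offset += part.byteLength;
+  }
+  return out; // an empty map yields only the zero-value length prefix
+}
+```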
diff --git a/docs/specification/compression/supported-compression-formats.md b/docs/specification/compression/supported-compression-formats.md
new file mode 100644
index 0000000000..ef1aafdf25
--- /dev/null
+++ b/docs/specification/compression/supported-compression-formats.md
@@ -0,0 +1,11 @@
+## Supported Compression Formats
+
+[lz4]: https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)
+[zstd]: https://en.wikipedia.org/wiki/Zstandard
+
+MCAP-supported chunk compression formats are listed below:
+
+- [lz4][lz4]: an algorithm that prioritizes compression/decompression speed over
+  compression ratio.
+- [zstd][zstd]: an algorithm that prioritizes compression ratio over
+  compression/decompression speed.
diff --git a/docs/specification/diagrams/chunked.png b/docs/specification/diagrams/chunked.png
new file mode 100644
index 0000000000..fa5b2bf591
--- /dev/null
+++ b/docs/specification/diagrams/chunked.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1aaa25c88cb353b3e1dd94ea3a7e981909f80dc1b0a34c9aa933af9bb517317
+size 103140
diff --git a/docs/specification/diagrams/unchunked.png b/docs/specification/diagrams/unchunked.png
new file mode 100644
index 0000000000..dab28289be
--- /dev/null
+++ b/docs/specification/diagrams/unchunked.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70d721b7fac6f3765b59926fef0fca185c8943fd8ef153b4ca5db1c09acebf35
+size 70137
diff --git a/docs/specification/notes/explanatory-notes.md b/docs/specification/notes/explanatory-notes.md
new file mode 100644
index 0000000000..374cf85457
--- /dev/null
+++ b/docs/specification/notes/explanatory-notes.md
@@ -0,0 +1,127 @@
+# Explanatory Notes
+
+The following notes may be useful for users of the MCAP format, including
+implementers of readers and writers.
+
+## Feature Explanations
+
+The format is intended to support efficient, indexed reading of messages and
+generation of summary data in both local and remote contexts. "Seeking" should
+be imagined to incur either a disk seek or an HTTP range request to an object
+store -- the latter being significantly more costly.
+
+### Scanning for records on specific topics within an interval
+
+The index is designed to support fast local and remote seek/filter operations
+with minimal seeking or range request overhead. The operation of the index for
+message reading is as follows:
+
+1. Client queries for all messages on topics /a, /b, /c between t0 and t1
+2. Reader reads the fixed-length footer off the end of the file
+3. Reader parses the index_offset from the footer, and starts reading from that
+   offset to the end of the file. During this read it will encounter the
+   following in order:
+   - A run of channel info records, one per channel in the file
+   - A run of Chunk Index records, one per chunk in the file
+   - The attachment index records
+   - The statistics record
+
+The reader in this case will stop after the chunk index records.
+
+4. Using the channel info records at the start of the read, the reader converts
+   topic names to channel IDs.
+5. Using the chunk index records, the reader locates the chunks that must be read,
+   based on the requested start times, channel IDs, and end times. These chunks
+   will be a contiguous run.
+6. Readers may access the message data in at least two ways:
+   - “full scan”: Seek from the chunk index to the start of the chunk using
+     chunk_offset. Read/decompress the entire chunk, discarding messages not on
+     the requested channels. 
Skip through the index data and into the next
+     chunk if it is targeted too.
+   - “index scan”: Consult the message_index_offsets field in the chunk index
+     record, and use it to locate specific message indexes after the chunk for
+     the channels of interest. These message indexes can be used to obtain a
+     list of offsets, which the reader can seek to and extract messages from.
+
+Which of these options is preferable will tend to depend on the proportion of
+topics in use, as well as potentially whether the storage system is local or
+remote.
+
+### Listing and accessing attachments
+
+The format provides the ability to list attachments contained within the file,
+and quickly extract them from the file contents. To list/select attachments
+in the file:
+
+1. Read the fixed-length footer and seek to the start of the index data
+   section.
+2. Scan forward until encountering the attachment index, then read attachment
+   index records until encountering a record that is not an attachment index.
+3. The records covered in the previous read will include attachment names,
+   types, sizes, and timestamps. These can be used to fill out a list of
+   attachments for selection.
+4. To select an attachment from the file, seek to the associated offset in the
+   file and unpack the file content from the attachment record.
+
+### Accessing summary statistics
+
+The format provides for fast local or remote access to summary information in
+the same style as "rosbag info", with the intent of functional parity with
+rosbag info. For reference, here is an example of the rosbag info output:
+
+```
+path: demo.bag
+version: 2.0
+duration: 7.8s
+start: Mar 21 2017 19:26:20.10 (1490149580.10)
+end: Mar 21 2017 19:26:27.88 (1490149587.88)
+size: 67.1 MB
+messages: 1606
+compression: lz4 [79/79 chunks; 56.23%]
+uncompressed: 119.1 MB @ 15.3 MB/s
+compressed: 67.0 MB @ 8.6 MB/s (56.23%)
+types: diagnostic_msgs/DiagnosticArray [60810da900de1dd6ddd437c3503511da]
+       radar_driver/RadarTracks [6a2de2f790cb8bb0e149d45d297462f8]
+       sensor_msgs/CompressedImage [8f7a12909da2c9d3332d540a0977563f]
+       sensor_msgs/PointCloud2 [1158d486dd51d683ce2f1be655c3c181]
+       sensor_msgs/Range [c005c34273dc426c67a020a87bc24148]
+       tf2_msgs/TFMessage [94810edda583a504dfda3829e70d7eec]
+topics: /diagnostics 52 msgs : diagnostic_msgs/DiagnosticArray
+        /image_color/compressed 234 msgs : sensor_msgs/CompressedImage
+        /radar/points 156 msgs : sensor_msgs/PointCloud2
+        /radar/range 156 msgs : sensor_msgs/Range
+        /radar/tracks 156 msgs : radar_driver/RadarTracks
+        /tf 774 msgs : tf2_msgs/TFMessage
+        /velodyne_points 78 msgs : sensor_msgs/PointCloud2
+```
+
+The reader will recover this data from the index as follows:
+
+1. Read the fixed length footer and seek to the index_offset.
+2. Read the run of channel info records that follow to get topic names, types,
+   and MD5 data (which in case of ROS1 will be in the user data section), as well
+   as channel IDs to interpret the chunk index records.
+3. After the channel infos are the chunk index records, if the file is chunked.
+   From each chunk index record extract the compression algorithm and
+   compressed/uncompressed size. From these the reader can compute the compression
+   statistics shown in the rosbag info summary. For unchunked files this field is
+   omitted.
+4. The MCAP version of “rosbag info” will display information about included
+   attachments as well. After reading the chunk index records, the attachment
+   index records will be scanned and incorporated into the summary.
+5. 
Finally, the statistics record is used to compute the start, end, total, and
+   per-channel message counts. The per-channel message counts must be
+   grouped/summed over topics for display.
+
+The only difference between the chunked and unchunked versions of this output
+will be the chunk compression statistics (“compressed”, “uncompressed”,
+“compression”), which will be omitted in the case of unchunked files. The
+summary should be very fast to generate in either local or remote contexts,
+requiring no seeking around the file to visit chunks.
+
+The above is not meant to prescribe a summary formatting, but to demonstrate
+that parity with the rosbag summary is supported by MCAP. There are other
+details we may consider including, like references to per-channel encryption or
+compression if these features get uptake. We could also enable more interaction
+with the channel info records, such as quickly obtaining schemas from the file
+for particular topics.
diff --git a/docs/specification/profiles/README.md b/docs/specification/profiles/README.md
new file mode 100644
index 0000000000..b2f82b4e99
--- /dev/null
+++ b/docs/specification/profiles/README.md
@@ -0,0 +1,22 @@
+## Supported profiles
+
+[ros1]: ./ros1.md
+[ros2]: ./ros2.md
+
+This directory contains supported "profiles" for MCAP channel info user data.
+Usage of these profiles is not mandatory, but may be helpful to third party
+tooling in better understanding and displaying your data. For instance, an
+application that reads a "latching" key from a channel info record will not
+necessarily know what to do with the value - however if the reader knows the
+MCAP file is recorded with the "ros1" profile, it can infer that this
+indicates a "latching topic" and behave accordingly.
+
+To make use of a profile, simply include the name of the profile in the
+"profile" field in the file header, and include the required keys in the user
+data section of all channel info records in the file. Additional keys can be
+added beyond those required by the profile as desired.
+
+Supported profiles are listed below:
+
+- [ros1][ros1]
+- [ros2][ros2]
diff --git a/docs/specification/profiles/ros1.md b/docs/specification/profiles/ros1.md
new file mode 100644
index 0000000000..a906777920
--- /dev/null
+++ b/docs/specification/profiles/ros1.md
@@ -0,0 +1,8 @@
+## ROS1 Profile
+
+Profile name: "ros1".
+
+Channel info records serialized with the "ros1" profile may include the following fields:
+
+- callerid (optional, string)
+- latching (optional, bool stringified as "true" or "false")
diff --git a/docs/specification/profiles/ros2.md b/docs/specification/profiles/ros2.md
new file mode 100644
index 0000000000..0bdbb32649
--- /dev/null
+++ b/docs/specification/profiles/ros2.md
@@ -0,0 +1,7 @@
+## ROS2 Profile
+
+Profile name: "ros2"
+
+Channel info records serialized with the "ros2" profile may include the following fields:
+
+- offered_qos_profiles (required, string)
diff --git a/package.json b/package.json
index 51f600c901..94d91e167c 100644
--- a/package.json
+++ b/package.json
@@ -4,5 +4,8 @@
     "packages": [
       "typescript"
     ]
+  },
+  "devDependencies": {
+    "prettier": "^2.5.1"
   }
 }
diff --git a/yarn.lock b/yarn.lock
index 454c519821..948deae4c5 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3174,6 +3174,11 @@ prettier@2.4.1:
   resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.4.1.tgz#671e11c89c14a4cfc876ce564106c4a6726c9f5c"
   integrity sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==
 
+prettier@^2.5.1:
+  version "2.5.1"
+  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.5.1.tgz#fff75fa9d519c54cf0fce328c1017d94546bc56a"
+  integrity sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==
+
 pretty-format@^27.0.0, pretty-format@^27.3.1:
   version "27.3.1"
   resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.3.1.tgz#7e9486365ccdd4a502061fa761d3ab9ca1b78df5"

From 1b4d79e6fb2b3c1403f3a858b034f2543156b00a Mon Sep 17 00:00:00 2001
From: Wyatt Alt
Date: Mon, 10 Jan 2022 10:10:03 -0800
Subject: [PATCH 011/635] Switches two transposed lines in the specification
 (#10)

---
 docs/specification/README.md | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/docs/specification/README.md b/docs/specification/README.md
index 8f6657c35d..18fddcdcff 100644
--- a/docs/specification/README.md
+++ b/docs/specification/README.md
@@ -122,9 +122,6 @@ all cases integers are serialized little endian:
 - **String**: a uint32-prefixed UTF8 string
 - **KeyValues**: A uint32 length-prefixed association of key-value pairs,
   serialized as
-- **Bytes**: refers to an array of bytes, without a length prefix. If a length
-  prefix is required a designation like "uint32 length-prefixed bytes" will be
-  used.
 
 ```
 
 ```
 
 An empty KeyValues consists of a zero-value length prefix.
 
+- **Bytes**: refers to an array of bytes, without a length prefix. If a length
+  prefix is required a designation like "uint32 length-prefixed bytes" will be
+  used.
+
 #### Header (op=0x01)
 
 The first record in every mcap file is a header.

From 319c56f8532401b672f8ff827aa73a53021d5141 Mon Sep 17 00:00:00 2001
From: Wyatt Alt
Date: Mon, 10 Jan 2022 10:18:45 -0800
Subject: [PATCH 012/635] Adds a missing column header in markdown spec (#11)

---
 docs/specification/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/specification/README.md b/docs/specification/README.md
index 18fddcdcff..682954c47a 100644
--- a/docs/specification/README.md
+++ b/docs/specification/README.md
@@ -193,7 +193,7 @@ and message records will be outside chunks, as there will be no chunks.
 
 A Chunk is a collection of compressed channel info and message records.
-| Bytes | Name | Type | Description |
+| Bytes | Name | Type | Description | Example |
 | ----- | ----------------- | ------ | ------------------------------------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------- |
 | 8 | uncompressed_size | uint64 | Uncompressed size of the "records" section. |
 | 4 | uncompressed_crc | uint32 | CRC32 checksum of decompressed "records" section. May be set to zero if CRC validation isn't required. |
 | 4 + N | compression | String | compression algorithm | lz4, zstd, "". A zero-length string indicates no compression. Refer to [supported compression formats][compression formats]. |
 | N | records | Bytes | Concatenated records, compressed with the algorithm in the "compression" field. |

From 28b549107742341e9cffcef67b52646f956cd166 Mon Sep 17 00:00:00 2001
From: Roman Shtylman
Date: Tue, 11 Jan 2022 15:41:56 -0800
Subject: [PATCH 013/635] Add a library field to the header (#12)

---
 docs/specification/README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/specification/README.md b/docs/specification/README.md
index 682954c47a..26111dd985 100644
--- a/docs/specification/README.md
+++ b/docs/specification/README.md
@@ -140,6 +140,7 @@ The first record in every mcap file is a header.
 
 | Bytes | Name | Type | Description |
 | ----- | -------- | ------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
 | 4+n | profile | String | The profile to use for interpretation of channel info user data. If the value matches one of the [supported profiles][profiles], the channel info user data section should be structured to match the description in the corresponding profile. This field may also be supplied empty, or contain a profile that is not one of those recognized. |
+| N | library | String | freeform string for writer or agent to specify its name and version |
 | N | metadata | KeyValues<string, string> | Example keys: robot_id, git_sha, timezone, run_id. |
 
 #### Footer (op=0x02)

From 683e8ac16b6c94cf471b361b6b90d39e4967fbb7 Mon Sep 17 00:00:00 2001
From: Roman Shtylman
Date: Tue, 11 Jan 2022 15:51:23 -0800
Subject: [PATCH 014/635] spec: Improve wording for header library field (#13)

---
 docs/specification/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/specification/README.md b/docs/specification/README.md
index 26111dd985..891819069d 100644
--- a/docs/specification/README.md
+++ b/docs/specification/README.md
@@ -140,7 +140,7 @@ The first record in every mcap file is a header.
 
 | Bytes | Name | Type | Description |
 | ----- | -------- | ------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
 | 4+n | profile | String | The profile to use for interpretation of channel info user data. If the value matches one of the [supported profiles][profiles], the channel info user data section should be structured to match the description in the corresponding profile. This field may also be supplied empty, or contain a profile that is not one of those recognized. 
| -| N | library | String | freeform string for writer or agent to specify its name and version | +| N | library | String | freeform string for writer to specify its name, version, or other information for use in debugging | | N | metadata | KeyValues | Example keys: robot_id, git_sha, timezone, run_id. | #### Footer (op=0x02) From 7861687447c94d44fc9a7ea88d95c6efc78918c0 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Wed, 12 Jan 2022 16:36:27 -0800 Subject: [PATCH 015/635] move docs lint to a separate CI job (#14) --- .github/workflows/ci.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b3f441d4e4..559cafadd1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,18 @@ on: branches: ["*"] jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: 16.x + cache: yarn + + - run: yarn install --frozen-lockfile + - run: yarn run prettier docs --check + typescript: runs-on: ubuntu-latest steps: From 199930daed461065b9f9074fccf157b7e80b47fc Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Fri, 14 Jan 2022 09:25:28 -0800 Subject: [PATCH 016/635] Unwrap lines in Markdown (#18) Prettier has a setting to unwrap lines of text in Markdown. Not wrapping allows editors to display text at the user's desired width, and also reduces the number of lines that have diffs with each edit. --- .github/workflows/ci.yml | 3 +- docs/.prettierrc.yml | 1 + docs/specification/README.md | 262 +++++++----------- .../supported-compression-formats.md | 6 +- docs/specification/notes/explanatory-notes.md | 95 ++----- docs/specification/profiles/README.md | 13 +- package.json | 3 + 7 files changed, 135 insertions(+), 248 deletions(-) create mode 100644 docs/.prettierrc.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 559cafadd1..fe9afed6b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: cache: yarn - run: yarn install --frozen-lockfile - - run: yarn run prettier docs --check + - run: yarn lint:docs typescript: runs-on: ubuntu-latest @@ -29,6 +29,5 @@ jobs: cache: yarn - run: yarn install --frozen-lockfile - - run: yarn run prettier docs/* --check - run: yarn workspace @foxglove/mcap lint:ci - run: yarn workspace @foxglove/mcap test diff --git a/docs/.prettierrc.yml b/docs/.prettierrc.yml new file mode 100644 index 0000000000..7b5590248b --- /dev/null +++ b/docs/.prettierrc.yml @@ -0,0 +1 @@ +proseWrap: never diff --git a/docs/specification/README.md b/docs/specification/README.md index 891819069d..3ca1a960ef 100644 --- a/docs/specification/README.md +++ b/docs/specification/README.md @@ -12,11 +12,7 @@ ## Overview -MCAP is a container file format for append-only storage of -heterogeneously-schematized data. It is inspired by the ROS1 bag format and is -intended to support flexible serialization options, while also generalizing to -non-ROS systems and retaining characteristics such as self-containment and -chunk compression. Features include: +MCAP is a container file format for append-only storage of heterogeneously-schematized data. It is inspired by the ROS1 bag format and is intended to support flexible serialization options, while also generalizing to non-ROS systems and retaining characteristics such as self-containment and chunk compression. Features include: - Single-pass, indexed writes (no backward seeking) - Flexible message serialization options (e.g. 
ros1, protobuf, …) @@ -30,37 +26,20 @@ chunk compression. Features include: Some helpful terms to understand in the following sections are: -- **Record**: A [TLV triplet][tlv wiki] with type and value corresponding to one - of the opcodes and schemas below. +- **Record**: A [TLV triplet][tlv wiki] with type and value corresponding to one of the opcodes and schemas below. - **Topic**: A named message type and associated schema. -- **Channel**: A logical stream that contains messages on a single topic. - Channels are associated with a numeric ID by the recorder - the **Channel ID**. -- **Channel Info**: A type of record describing information about a channel, - notably containing the name and schema of the topic. -- **Message**: A type of record representing a timestamped message on a channel - (and therefore associated with a topic/schema). A message can be parsed by a - reader that has also read the channel info for the channel on which the - message appears. -- **Chunk**: A record type that wraps a compressed set of channel info and - message records. -- **Attachment**: Extra data that may be included in the file, outside the - chunks. Attachments may be quickly listed and accessed via an index at the - end of the file. -- **Index**: The format contains indexes for both messages and attachments. For - messages, there are two levels of indexing - a **Chunk Index** at the end of - the file points to chunks by offset, enabling fast location of chunks based - on topic and timerange. A second index - the **Message Index** - after each - chunk contains, for each channel in the chunk, and offset and timestamp for - every message to allow fast location of messages within the decompressed - chunk data. - - The attachment index at the end of the file allows for fast listing and - location of attachments based on name, timestamp, or attachment type. +- **Channel**: A logical stream that contains messages on a single topic. Channels are associated with a numeric ID by the recorder - the **Channel ID**. +- **Channel Info**: A type of record describing information about a channel, notably containing the name and schema of the topic. +- **Message**: A type of record representing a timestamped message on a channel (and therefore associated with a topic/schema). A message can be parsed by a reader that has also read the channel info for the channel on which the message appears. +- **Chunk**: A record type that wraps a compressed set of channel info and message records. +- **Attachment**: Extra data that may be included in the file, outside the chunks. Attachments may be quickly listed and accessed via an index at the end of the file. +- **Index**: The format contains indexes for both messages and attachments. For messages, there are two levels of indexing - a **Chunk Index** at the end of the file points to chunks by offset, enabling fast location of chunks based on topic and timerange. A second index - the **Message Index** - after each chunk contains, for each channel in the chunk, and offset and timestamp for every message to allow fast location of messages within the decompressed chunk data. + + The attachment index at the end of the file allows for fast listing and location of attachments based on name, timestamp, or attachment type. 
## Format Description -An MCAP file is physically structured as a series of concatenated, type- and -length-prefixed **"records"**, capped on each end with magic bytes: +An MCAP file is physically structured as a series of concatenated, type- and length-prefixed **"records"**, capped on each end with magic bytes: [...] @@ -68,16 +47,9 @@ These are the magic bytes: 0x89, M, C, A, P, 0x30, \r, \n -> Note: The version byte (ASCII zero 0x30) following "MCAP" will be updated to -> 1 (0x31) upon ratification of this specification. Until then, backward -> compatibility is not guaranteed. +> Note: The version byte (ASCII zero 0x30) following "MCAP" will be updated to 1 (0x31) upon ratification of this specification. Until then, backward compatibility is not guaranteed. -MCAP files may be **"chunked"** or **"unchunked"**. Chunked and unchunked files -have different constraints on the layout of record types in the file. In -chunked files, messages are grouped into optionally-compressed blocks of data -before being written to disk. In an unchunked file, each message is written out -uncompressed. See the diagrams below for clarity (the record types shown are -described in the following section): +MCAP files may be **"chunked"** or **"unchunked"**. Chunked and unchunked files have different constraints on the layout of record types in the file. In chunked files, messages are grouped into optionally-compressed blocks of data before being written to disk. In an unchunked file, each message is written out uncompressed. See the diagrams below for clarity (the record types shown are described in the following section): #### Chunked @@ -92,36 +64,26 @@ Benefits of chunked files include: - Support for random access via time- and topic-based indexing. - Reduced storage requirements when recording or processing data. - Reduced bandwidth requirements when transferring over a network. -- Possibly higher write performance if the cost of IO outweighs the cost of - compression. +- Possibly higher write performance if the cost of IO outweighs the cost of compression. Benefits of unchunked files include: - Higher write performance on CPU-constrained systems. -- Less potential for data los in case of a recording crash. No - "to-be-compressed" buffer is dropped by the recorder -- though the protocol - makes no specification on how the process syncs unchunked messages to disk. +- Less potential for data los in case of a recording crash. No "to-be-compressed" buffer is dropped by the recorder -- though the protocol makes no specification on how the process syncs unchunked messages to disk. -Unchunked files are less friendly to readers than chunked files due to their -lack of an index and greater size. When unchunked files are in use, they may be -converted to chunked files in post-processing to mitigate this. +Unchunked files are less friendly to readers than chunked files due to their lack of an index and greater size. When unchunked files are in use, they may be converted to chunked files in post-processing to mitigate this. ### Record Types -Record types are identified by single-byte **opcodes**. Record opcodes in the -range 0x01-0x7F are reserved for future MCAP format usage. 0x80-0xFF are -reserved for application extensions and user proposals. +Record types are identified by single-byte **opcodes**. Record opcodes in the range 0x01-0x7F are reserved for future MCAP format usage. 0x80-0xFF are reserved for application extensions and user proposals. 

##### Serialization and Notation

The section below uses the following data types and serialization choices. In all cases integers are serialized little endian:

- **Timestamp**: uint64 nanoseconds since a user-understood epoch (i.e. unix epoch, robot boot time, etc)
- **String**: a uint32-prefixed UTF8 string
- **KeyValues**: A uint32 length-prefixed association of key-value pairs, serialized as

```

```

An empty KeyValues consists of a zero-value length prefix.

- **Bytes**: refers to an array of bytes, without a length prefix. If a length prefix is required a designation like "uint32 length-prefixed bytes" will be used.

#### Header (op=0x01)

The first record in every mcap file is a header.

| Bytes | Name | Type | Description |
| --- | --- | --- | --- |
| 4+n | profile | String | The profile to use for interpretation of channel info user data. If the value matches one of the [supported profiles][profiles], the channel info user data section should be structured to match the description in the corresponding profile. This field may also be supplied empty, or contain a profile that is not one of those recognized. |
| N | library | String | freeform string for writer to specify its name, version, or other information for use in debugging |
| N | metadata | KeyValues<string, string> | Example keys: robot_id, git_sha, timezone, run_id. |

#### Footer (op=0x02)

| Bytes | Name | Type | Description |
| --- | --- | --- | --- |
| 8 | index_offset | uint64 | Pointer to start of index section. 
If there are no records in the index section, this should be zero. |
| 4 | index_crc | uint32 | CRC of all data from index_offset through the byte immediately preceding this CRC. Optionally zero. |

A file without a footer is **corrupt**, indicating the writer process encountered an unclean shutdown. It may be possible to recover data from a corrupt file.

#### Channel Info (op=0x03)

Identifies a stream of messages on a particular topic and includes information about how the messages should be decoded by readers. A channel info record must occur in the file prior to any message that references its Channel ID. Channel IDs must uniquely identify a channel across the entire file.

| Bytes | Name | Type | Description | Example |
| --- | --- | --- | --- | --- |
| 2 | channel_id | uint16 | Channel ID | 1 |
| 4 + N | topic_name | String | Topic | /diagnostics |
| 4 + N | encoding | String | Message Encoding | cdr, cbor, ros1, protobuf, etc. |
| 4 + N | schema_name | String | Schema Name | std_msgs/Header |
| 4+N | schema | uint32 length-prefixed bytes | Schema | |
| N | user_data | KeyValues<string, string> | Metadata about this channel | used to encode protocol-specific details like callerid, latching, QoS profiles... Refer to [supported profiles][profiles]. |
| 4 | crc | uint32 | CRC checksum of preceding fields in the record. If advantageous for performance, zero may be recorded. Readers will need to skip checksum validation to parse such a file. | |

#### Message (op=0x04)

A message record encodes a single timestamped message on a particular channel. Message records may occur inside a Chunk, or outside the chunk in the case of an unchunked file. A chunked file may not have messages outside the chunks. 

Message records must be preceded by a Channel Info record for the given channel ID. That Channel Info record may appear inside the same chunk as the message, or in an earlier chunk in the file. In an unchunked file, both the channel info and message records will be outside chunks, as there will be no chunks.

| Bytes | Name | Type | Description |
| --- | --- | --- | --- |
| 2 | channel_id | uint16 | Channel ID |
| 4 | sequence | uint32 | Optional message counter assigned by publisher. If not assigned by publisher, must be recorded by the recorder. |
| 8 | publish_time | Timestamp | Time at which the message was published. If not available, must be set to the record time. |
| 8 | record_time | Timestamp | Time at which the message was recorded by the recorder process. |
| N | message_data | Bytes | Message data, to be decoded according to the schema of the channel. |

#### Chunk (op=0x05)

A Chunk is a collection of compressed channel info and message records.

| Bytes | Name | Type | Description | Example |
| --- | --- | --- | --- | --- |
| 8 | uncompressed_size | uint64 | Uncompressed size of the "records" section. |
| 4 | uncompressed_crc | uint32 | CRC32 checksum of decompressed "records" section. May be set to zero if CRC validation isn't required. |
| 4 + N | compression | String | compression algorithm | lz4, zstd, "". A zero-length string indicates no compression. Refer to [supported compression formats][compression formats]. |
| N | records | Bytes | Concatenated records, compressed with the algorithm in the "compression" field. |

#### Message Index (op=0x06)

The Message Index record maps timestamps to message offsets. One message index record is written for each channel in the preceding chunk. All message index records for a chunk must immediately follow the chunk. 
One message index -record is written for each channel in the preceding chunk. All message index -records for a chunk must immediately follow the chunk. +The Message Index record maps timestamps to message offsets. One message index record is written for each channel in the preceding chunk. All message index records for a chunk must immediately follow the chunk. -| Bytes | Name | Type | Description | -| ----- | ---------- | ---------------------------- | -------------------------------------------------------------------------------------------------------------- | -| 2 | channel_id | uint16 | Channel ID. | -| 4 | count | uint32 | Number of records in the chunk, on this channel. | -| N | records | KeyValues | Array of timestamp and offset for each record. Offset is relative to the start of the decompressed chunk data. | -| 4 | crc | uint32 | CRC of preceding fields in the record. May be zeroed if not required. | +| Bytes | Name | Type | Description | +| --- | --- | --- | --- | +| 2 | channel_id | uint16 | Channel ID. | +| 4 | count | uint32 | Number of records in the chunk, on this channel. | +| N | records | KeyValues | Array of timestamp and offset for each record. Offset is relative to the start of the decompressed chunk data. | +| 4 | crc | uint32 | CRC of preceding fields in the record. May be zeroed if not required. | #### Chunk Index (op=0x07) -The Chunk Index records form a coarse index of timestamps to chunk offsets, -along with the locations of the message index records associatiated with those -chunks. - -| Bytes | Name | Type | Description | -| ----- | --------------------- | ------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -| 8 | start_time | Timestamp | First message record timestamp in the chunk. | -| 8 | end_time | Timestamp | Last message record timestamp in the chunk. | -| 8 | chunk_offset | uint64 | Offset to the chunk record from the start of the file. | -| N | message_index_offsets | KeyValues | Mapping from channel ID, to the offset of the message index record for that channel after the chunk, from the start of the file. | -| 8 | message_index_length | uint64 | Total length in bytes of the message index records after the chunk, including lengths and opcodes. | -| 4 + N | compression | String | The compression used on this chunk. Refer to [supported compression formats][compression formats]. | -| 8 | compressed_size | uint64 | The compressed size of the chunk. | -| 8 | decompressed_size | uint64 | The decompressed size of the chunk. | -| 4 | crc | uint32 | CRC of the preceding fields within the record. | +The Chunk Index records form a coarse index of timestamps to chunk offsets, along with the locations of the message index records associatiated with those chunks. + +| Bytes | Name | Type | Description | +| --- | --- | --- | --- | +| 8 | start_time | Timestamp | First message record timestamp in the chunk. | +| 8 | end_time | Timestamp | Last message record timestamp in the chunk. | +| 8 | chunk_offset | uint64 | Offset to the chunk record from the start of the file. | +| N | message_index_offsets | KeyValues | Mapping from channel ID, to the offset of the message index record for that channel after the chunk, from the start of the file. | +| 8 | message_index_length | uint64 | Total length in bytes of the message index records after the chunk, including lengths and opcodes. | +| 4 + N | compression | String | The compression used on this chunk. 
Refer to [supported compression formats][compression formats]. |
| 8 | compressed_size | uint64 | The compressed size of the chunk. |
| 8 | decompressed_size | uint64 | The decompressed size of the chunk. |
| 4 | crc | uint32 | CRC of the preceding fields within the record. |

#### Attachment (op=0x08)

Attachments can be used to attach artifacts such as calibration data, text, or core dumps. Attachment records must not appear within a chunk.

| Bytes | Name | Type | Description |
| --- | --- | --- | --- |
| 4+N | name | String | Name of the attachment, e.g. "scene1.jpg". |
| 8 | record_time | Timestamp | Time at which the attachment was recorded. |
| 4+N | content_type | String | MIME Type (e.g. "text/plain"). |
| 8+N | data | uint64 length-prefixed bytes | Attachment data. |
| 4 | crc | uint32 | CRC of preceding fields in the record. Optionally zero. |

#### Attachment Index (op=0x09)

The attachment index is an index to named attachments within the file. One record is recorded per attachment in the file.

| Bytes | Name | Type | Description |
| --- | --- | --- | --- |
| 8 | record_time | Timestamp | Timestamp at which the attachment was recorded. |
| 8 | attachment_size | uint64 | Size of the attachment. |
| 4 + N | name | String | Name of the attachment. |
| 4 + N | content_type | String | MIME type of the attachment. |
| 8 | offset | uint64 | Byte offset to the attachment, from the start of the file. |

#### Statistics (op=0x0A)

The statistics record contains statistics about the recorded data. It is the last record in the file before the footer.

| Bytes | Name | Type | Description |
| --- | --- | --- | --- |
| 8 | message_count | uint64 | Number of messages in the file across all topics. |
| 4 | channel_count | uint32 | Number of channels in the file across all topics. |
| 4 | attachment_count | uint32 | Number of attachments in the file. |
| 4 | chunk_count | uint32 | Number of chunks in the file. 
| -| N | channel_stats | KeyValues | Array of channel IDs and total message counts for the channel. | +| Bytes | Name | Type | Description | +| --- | --- | --- | --- | +| 8 | message_count | uint64 | Number of messages in the file across all topics. | +| 4 | channel_count | uint32 | Number of channels in the file across all topics. | +| 4 | attachment_count | uint32 | Number of attachments in the file. | +| 4 | chunk_count | uint32 | Number of chunks in the file. | +| N | channel_stats | KeyValues | Array of channel IDs and total message counts for the channel. | ## Further Reading Useful links below: -- [Feature explanations][feature explanations]: includes usage details that may - be useful to implementers of readers or writers. +- [Feature explanations][feature explanations]: includes usage details that may be useful to implementers of readers or writers. diff --git a/docs/specification/compression/supported-compression-formats.md b/docs/specification/compression/supported-compression-formats.md index ef1aafdf25..5e853884d6 100644 --- a/docs/specification/compression/supported-compression-formats.md +++ b/docs/specification/compression/supported-compression-formats.md @@ -5,7 +5,5 @@ MCAP-supported chunk compression formats are listed below: -- [lz4][lz4]: an algorithm that prioritizes compression/decompression speed over - compression ratio. -- [zstd][zstd]: an algorithm that prioritizes compression ratio over - compression/decompression speed. +- [lz4][lz4]: an algorithm that prioritizes compression/decompression speed over compression ratio. +- [zstd][zstd]: an algorithm that prioritizes compression ratio over compression/decompression speed. diff --git a/docs/specification/notes/explanatory-notes.md b/docs/specification/notes/explanatory-notes.md index 374cf85457..b0bb47e374 100644 --- a/docs/specification/notes/explanatory-notes.md +++ b/docs/specification/notes/explanatory-notes.md @@ -1,26 +1,18 @@ # Explanatory Notes -The following notes may be useful for users of the MCAP format, including -implementers of readers and writers. +The following notes may be useful for users of the MCAP format, including implementers of readers and writers. ## Feature Explanations -The format is intended to support efficient, indexed reading of messages and -generation of summary data in both local and remote contexts. "Seeking" should -be imagined to incur either a disk seek or an HTTP range request to an object -store -- the latter being significantly more costly. +The format is intended to support efficient, indexed reading of messages and generation of summary data in both local and remote contexts. "Seeking" should be imagined to incur either a disk seek or an HTTP range request to an object store -- the latter being significantly more costly. ### Scanning for records on specific topics within an interval -The index is designed to support fast local and remote seek/filter operations -with minimal seeking or range request overhead. The operation of the index for -message reading is as follows: +The index is designed to support fast local and remote seek/filter operations with minimal seeking or range request overhead. The operation of the index for message reading is as follows: 1. Client queries for all messages on topics /a, /b, /c between t0 and t1 2. Reader reads the fixed-length footer off the end of the file -3. Reader parses the index_offset from the footer, and starts reading from that - offset to the end of the file. During this read it will encounter the - following in order: +3. 
Reader parses the index_offset from the footer, and starts reading from that offset to the end of the file. During this read it will encounter the following in order:
   - A run of channel info records, one per channel in the file
   - A run of Chunk Index records, one per chunk in the file
   - The attachment index records
   - The statistics record

The reader in this case will stop after the chunk index records.

4. Using the channel info records at the start of the read, the reader converts topic names to channel IDs.
5. Using the chunk index records, the reader locates the chunks that must be read, based on the requested start times, channel IDs, and end times. These chunks will be a contiguous run.
6. Readers may access the message data in at least two ways:
   - “full scan”: Seek from the chunk index to the start of the chunk using chunk_offset. Read/decompress the entire chunk, discarding messages not on the requested channels. Skip through the index data and into the next chunk if it is targeted too.
   - “index scan”: Consult the message_index_offsets field in the chunk index record, and use it to locate specific message indexes after the chunk for the channels of interest. These message indexes can be used to obtain a list of offsets, which the reader can seek to and extract messages from.

Which of these options is preferable will tend to depend on the proportion of topics in use, as well as potentially whether the storage system is local or remote.

### Listing and accessing attachments

The format provides the ability to list attachments contained within the file, and quickly extract them from the file contents. To list/select attachments in the file:

1. Read the fixed-length footer and seek to the start of the index data section. 
 ### Listing and accessing attachments
 
-The format provides the ability to list attachments contained wihtin the file,
-and quickly extract them from the file contents. To list/select attachments
-in the file:
+The format provides the ability to list attachments contained within the file, and quickly extract them from the file contents. To list/select attachments in the file:
 
-1. Read the fixed-length footer and seek to the start of the index data
-   section.
-2. Scan forward until encountering the attachment index, then read attachment
-   index records until encountering a record that is not an attachment index.
-3. The rcords covered in the previous read will include attachment names,
-   types, sizes, and timestamps. These can be used to fill out a list of
-   attachments for selection.
-4. To select an attachment from th efile, seek to the associated offset in the
-   file and unpack the file content from the attachment record.
+1. Read the fixed-length footer and seek to the start of the index data section.
+2. Scan forward until encountering the attachment index, then read attachment index records until encountering a record that is not an attachment index.
+3. The records covered in the previous read will include attachment names, types, sizes, and timestamps. These can be used to fill out a list of attachments for selection.
+4. To select an attachment from the file, seek to the associated offset in the file and unpack the file content from the attachment record.
 
 ### Accessing summary statistics
 
-The format provides for fast local or remote access to summary information in
-the same style as "rosbag info", with the intent of functional parity with
-rosbag info. For reference, here is an example of the rosbag info output:
+The format provides for fast local or remote access to summary information in the same style as "rosbag info", with the intent of functional parity with rosbag info. For reference, here is an example of the rosbag info output:
 
 ```
 path: demo.bag
@@ -98,30 +70,11 @@ topics: /diagnostics 52 msgs : diagnostic_msgs/Diagnosti
 The reader will recover this data from the index as follows:
 
 1. Read the fixed length footer and seek to the index_offset.
-2. Read the run of channel info records that follow to get topic names, types,
-   and MD5 data (which in case of ROS1 will be in the user data section), as well
-   as channel IDs to interpret the chunk index records.
-3. After the channel infos are the chunk index records, if the file is chunked.
-   From each chunk index record extract the compression algorithm and
-   compressed/uncompressed size. From these the reader can compute the compression
-   statistics shown in the rosbag info summary. For unchunked files this field is
-   omitted.
-4. The MCAP version of “rosbag info” will display information about included
-   attachments as well. After reading the chunk index records, the attachment
-   index records will be scanned and incorporated into the summary.
-5. Finally, the statistics record is used to compute the start, end, total, and
-   per-channel message counts. The per-channel message counts must be
-   grouped/summed over topics for display.
-
-The only difference between the chunked and unchunked versions of this output
-will be the chunk compression statistics (“compressed”, “uncompressed”,
-“compression”), which will be omitted in the case of unchunked files. The
-summary should be very fast to generate in either local or remote contexts,
-requiring no seeking around the file to visit chunks.
-
-The above is not meant to prescribe a summary formatting, but to demonstrate
-that parity with the rosbag summary is supported by MCAP. There are other
-details we may consider including, like references to per-channel encryption or
-compression if these features get uptake. We could also enable more interaction
-with the channel info records, such as quickly obtaining schemas from the file
-for particular topics.
+2. Read the run of channel info records that follow to get topic names, types, and MD5 data (which in case of ROS1 will be in the user data section), as well as channel IDs to interpret the chunk index records.
+3. After the channel infos are the chunk index records, if the file is chunked. From each chunk index record extract the compression algorithm and compressed/uncompressed size. From these the reader can compute the compression statistics shown in the rosbag info summary. For unchunked files this field is omitted.
+4. The MCAP version of “rosbag info” will display information about included attachments as well.
After reading the chunk index records, the attachment index records will be scanned and incorporated into the summary. +5. Finally, the statistics record is used to compute the start, end, total, and per-channel message counts. The per-channel message counts must be grouped/summed over topics for display. + +The only difference between the chunked and unchunked versions of this output will be the chunk compression statistics (“compressed”, “uncompressed”, “compression”), which will be omitted in the case of unchunked files. The summary should be very fast to generate in either local or remote contexts, requiring no seeking around the file to visit chunks. + +The above is not meant to prescribe a summary formatting, but to demonstrate that parity with the rosbag summary is supported by MCAP. There are other details we may consider including, like references to per-channel encryption or compression if these features get uptake. We could also enable more interaction with the channel info records, such as quickly obtaining schemas from the file for particular topics. diff --git a/docs/specification/profiles/README.md b/docs/specification/profiles/README.md index b2f82b4e99..362d4d78bb 100644 --- a/docs/specification/profiles/README.md +++ b/docs/specification/profiles/README.md @@ -3,18 +3,9 @@ [ros1]: ./ros1.md [ros2]: ./ros2.md -This directory contains supported "profiles" for MCAP channel info user data. -Usage of these profiles is not mandatory, but may be helpful to third party -tooling in better understanding and displaying your data. For instance, an -application that reads a "latching" key from a channel info record will not -necessarily know what to do with the value - however if the reader knows the -MCAP file is recorded with the "ros1" profile, it can make an inference that -this is indicating a "latching topic" and behave accordingly. +This directory contains supported "profiles" for MCAP channel info user data. Usage of these profiles is not mandatory, but may be helpful to third party tooling in better understanding and displaying your data. For instance, an application that reads a "latching" key from a channel info record will not necessarily know what to do with the value - however if the reader knows the MCAP file is recorded with the "ros1" profile, it can make an inference that this is indicating a "latching topic" and behave accordingly. -To make use of a profile, simply include the name of the profile in the -"profile" field in the file header, and include the required keys in the user -data section of all channel info records in the file. Additional keys can be -added beyond those required by the profile as desired. +To make use of a profile, simply include the name of the profile in the "profile" field in the file header, and include the required keys in the user data section of all channel info records in the file. Additional keys can be added beyond those required by the profile as desired. 
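With the TypeScript `Mcap0UnindexedWriter` from this repository, that can look like the following sketch (the topic, schema, and user-data values here are illustrative; `writable` is any `IWritable` sink, such as the file-handle wrapper in `scripts/bag2proto.ts`):

```
import { IWritable, Mcap0UnindexedWriter } from "../src/v0";

async function writeWithRos1Profile(writable: IWritable): Promise<void> {
  const writer = new Mcap0UnindexedWriter(writable);
  // Name the profile in the "profile" field of the file header...
  await writer.start({ profile: "ros1", library: "example-writer", metadata: [] });
  // ...and include the keys the profile requires in each channel info's user data.
  await writer.registerChannel({
    topicName: "/diagnostics",
    encoding: "ros1",
    schemaName: "diagnostic_msgs/DiagnosticArray",
    schema: "", // schema text omitted here for brevity
    userData: [
      ["callerid", "/my_node"],
      ["latching", "false"],
    ],
  });
  await writer.end();
}
```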
 Supported profiles are listed below:
diff --git a/package.json b/package.json
index 94d91e167c..7f7c28fd00 100644
--- a/package.json
+++ b/package.json
@@ -5,6 +5,9 @@
       "typescript"
     ]
   },
+  "scripts": {
+    "lint:docs": "prettier docs --check"
+  },
   "devDependencies": {
     "prettier": "^2.5.1"
   }

From 872859c3a5447af0df960647e4207c3309d4e867 Mon Sep 17 00:00:00 2001
From: Jacob Bandes-Storch
Date: Fri, 14 Jan 2022 14:48:48 -0800
Subject: [PATCH 017/635] Spec edits and clarifications (#19)

- Specify record length prefix is uint64
- Specify footer is the last record in a file
- Call out when duplicate keys are not allowed in KeyValues
- Change all "decompressed" to "uncompressed"
- Rename channel_stats to channel_message_counts
- Clarify record CRCs do not include opcode/length, and unify wording
- Fix incorrect description for attachment data
---
 docs/specification/README.md | 48 +++++++++++++++++++-----------------
 1 file changed, 25 insertions(+), 23 deletions(-)

diff --git a/docs/specification/README.md b/docs/specification/README.md
index 3ca1a960ef..fab1cdeeef 100644
--- a/docs/specification/README.md
+++ b/docs/specification/README.md
@@ -33,13 +33,13 @@ Some helpful terms to understand in the following sections are:
 - **Message**: A type of record representing a timestamped message on a channel (and therefore associated with a topic/schema). A message can be parsed by a reader that has also read the channel info for the channel on which the message appears.
 - **Chunk**: A record type that wraps a compressed set of channel info and message records.
 - **Attachment**: Extra data that may be included in the file, outside the chunks. Attachments may be quickly listed and accessed via an index at the end of the file.
-- **Index**: The format contains indexes for both messages and attachments. For messages, there are two levels of indexing - a **Chunk Index** at the end of the file points to chunks by offset, enabling fast location of chunks based on topic and timerange. A second index - the **Message Index** - after each chunk contains, for each channel in the chunk, and offset and timestamp for every message to allow fast location of messages within the decompressed chunk data.
+- **Index**: The format contains indexes for both messages and attachments. For messages, there are two levels of indexing - a **Chunk Index** at the end of the file points to chunks by offset, enabling fast location of chunks based on topic and timerange. A second index - the **Message Index** - after each chunk contains, for each channel in the chunk, an offset and timestamp for every message to allow fast location of messages within the uncompressed chunk data.
 
 The attachment index at the end of the file allows for fast listing and location of attachments based on name, timestamp, or attachment type.
 
 ## Format Description
 
-An MCAP file is physically structured as a series of concatenated, type- and length-prefixed **"records"**, capped on each end with magic bytes:
+An MCAP file is physically structured as a series of concatenated **"records"**, each prefixed with a uint8 type and uint64 length, capped on each end with magic bytes:
 
[...]
 
These are the magic bytes:
 
 > Note: The version byte (ASCII zero 0x30) following "MCAP" will be updated to 1 (0x31) upon ratification of this specification. Until then, backward compatibility is not guaranteed.
 
+The first record in every file must be a Header (op=0x01) and the last record must be a Footer (op=0x02).
+
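In code, the record framing described above (a uint8 opcode followed by a uint64 length and the record body) reduces to a small helper; a minimal sketch, assuming the little-endian integer encoding used by the TypeScript implementation in this series, with `fieldBytes` holding a record's already-serialized fields:

```
// Frame one record as <uint8 opcode><uint64 length><fields>.
function frameRecord(opcode: number, fieldBytes: Uint8Array): Uint8Array {
  const result = new Uint8Array(1 + 8 + fieldBytes.byteLength);
  const view = new DataView(result.buffer);
  view.setUint8(0, opcode);
  // The length prefix counts only the field bytes and is serialized little-endian.
  view.setBigUint64(1, BigInt(fieldBytes.byteLength), true);
  result.set(fieldBytes, 9);
  return result;
}
```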
 MCAP files may be **"chunked"** or **"unchunked"**. Chunked and unchunked files have different constraints on the layout of record types in the file. In chunked files, messages are grouped into optionally-compressed blocks of data before being written to disk. In an unchunked file, each message is written out uncompressed. See the diagrams below for clarity (the record types shown are described in the following section):
 
 #### Chunked
@@ -95,20 +97,22 @@ An empty KeyValues consists of a zero-value length prefix.
 
 #### Header (op=0x01)
 
-The first record in every mcap file is a header.
+The first record in every MCAP file is a header.
 
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
-| 4+n | profile | String | The profile to use for interpretation of channel info user data. If the value matches one of the [supported profiles][profiles], the channel info user data section should be structured to match the description in the corresponding profile. This field may also be supplied empty, or containing a framework that is not one of those recognized. |
+| 4 + N | profile | String | The profile to use for interpretation of channel info user data. If the value matches one of the [supported profiles][profiles], the channel info user data section should be structured to match the description in the corresponding profile. This field may also be supplied empty, or contain a profile that is not one of those recognized. |
 | N | library | String | freeform string for writer to specify its name, version, or other information for use in debugging |
 | N | metadata | KeyValues | Example keys: robot_id, git_sha, timezone, run_id. |
 
 #### Footer (op=0x02)
 
+The last record in every MCAP file is a footer.
+
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
 | 8 | index_offset | uint64 | Pointer to start of index section. If there are no records in the index section, this should be zero. |
-| 4 | index_crc | uint32 | CRC of all data from index_offset through the byte immediately preceding this CRC. Optionally zero. |
+| 4 | index_crc | uint32 | CRC32 checksum of all data from index_offset through the byte immediately preceding this CRC. A value of zero indicates that CRC validation should not be performed. |
 
 A file without a footer is **corrupt**, indicating the writer process encountered an unclean shutdown. It may be possible to recover data from a corrupt file.
@@ -118,13 +122,13 @@ Identifies a stream of messages on a particular topic and includes information a
 
 | Bytes | Name | Type | Description | Example |
 | --- | --- | --- | --- | --- |
-| 2 | channel_id | uint16 | Channel ID 1 | 1 |
+| 2 | id | uint16 | Channel ID 1 | 1 |
 | 4 + N | topic_name | String | Topic | /diagnostics |
 | 4 + N | encoding | String | Message Encoding | cdr, cbor, ros1, protobuf, etc. |
 | 4 + N | schema_name | String | Schema Name | std_msgs/Header |
-| 4+N | schema | uint32 length-prefixed bytes | Schema | |
+| 4 + N | schema | uint32 length-prefixed bytes | Schema | |
 | N | user_data | KeyValues | Metadata about this channel | used to encode protocol-specific details like callerid, latching, QoS profiles... Refer to [supported profiles][profiles]. |
-| 4 | crc | uint32 | CRC checksum of preceding fields in the record. If advantageous for performance, zero may be recorded. Readers will need to skip checksum validation to parse such a file. | |
+| 4 | crc | uint32 | CRC32 checksum of preceding fields in the record (not including the record opcode and length prefix). A value of zero indicates that CRC validation should not be performed. | |
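To illustrate the CRC convention used in these record tables: a writer checksums only the serialized field bytes and appends the result, as in this sketch using the `@foxglove/crc` package that the TypeScript implementation in this series relies on (`fieldBytes` is assumed to already exclude the opcode and length prefix):

```
import { crc32 } from "@foxglove/crc";

// fieldBytes: a record's serialized fields, excluding the opcode and length prefix.
function appendCrc(fieldBytes: Uint8Array): Uint8Array {
  const result = new Uint8Array(fieldBytes.byteLength + 4);
  result.set(fieldBytes, 0);
  // Like other fixed-width integers in MCAP, the CRC is stored little-endian.
  new DataView(result.buffer).setUint32(fieldBytes.byteLength, crc32(fieldBytes), true);
  return result;
}
```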
 #### Message (op=0x04)
@@ -147,7 +151,7 @@ A Chunk is a collection of compressed channel info and message records.
 | Bytes | Name | Type | Description | Example |
 | --- | --- | --- | --- | --- |
 | 8 | uncompressed_size | uint64 | Uncompressed size of the "records" section. |
-| 4 | uncompressed_crc | uint32 | CRC32 checksum of decompressed "records" section. May be set to zero if CRC validation isn't required. |
+| 4 | uncompressed_crc | uint32 | CRC32 checksum of uncompressed "records" section. A value of zero indicates that CRC validation should not be performed. |
 | 4 + N | compression | String | compression algorithm | lz4, zstd, "". A zero-length string indicates no compression. Refer to [supported compression formats][compression formats]. |
 | N | records | Bytes | Concatenated records, compressed with the algorithm in the "compression" field. |
 
 #### Message Index (op=0x05)
 
The Message Index record maps timestamps to message offsets. One message index r
 | --- | --- | --- | --- |
 | 2 | channel_id | uint16 | Channel ID. |
 | 4 | count | uint32 | Number of records in the chunk, on this channel. |
-| N | records | KeyValues | Array of timestamp and offset for each record. Offset is relative to the start of the decompressed chunk data. |
-| 4 | crc | uint32 | CRC of preceding fields in the record. May be zeroed if not required. |
+| N | records | KeyValues | Array of record_time and offset for each record. Offset is relative to the start of the uncompressed chunk data. |
+| 4 | crc | uint32 | CRC32 checksum of preceding fields in the record (not including the record opcode and length prefix). A value of zero indicates that CRC validation should not be performed. |
 
 #### Chunk Index (op=0x07)
 
The Chunk Index records form a coarse index of timestamps to chunk offsets, alon
 | --- | --- | --- | --- |
 | 8 | start_time | Timestamp | First message record timestamp in the chunk. |
 | 8 | end_time | Timestamp | Last message record timestamp in the chunk. |
-| 8 | chunk_offset | uint64 | Offset to the chunk record from the start of the file. |
-| N | message_index_offsets | KeyValues | Mapping from channel ID, to the offset of the message index record for that channel after the chunk, from the start of the file. |
+| 8 | offset | uint64 | Offset to the chunk record from the start of the file. |
+| N | message_index_offsets | KeyValues | Mapping from channel ID to the offset of the message index record for that channel after the chunk, from the start of the file. Duplicate keys are not allowed. |
 | 8 | message_index_length | uint64 | Total length in bytes of the message index records after the chunk, including lengths and opcodes. |
 | 4 + N | compression | String | The compression used on this chunk. Refer to [supported compression formats][compression formats]. |
 | 8 | compressed_size | uint64 | The compressed size of the chunk. |
-| 8 | decompressed_size | uint64 | The decompressed size of the chunk. |
-| 4 | crc | uint32 | CRC of the preceding fields within the record. |
+| 8 | uncompressed_size | uint64 | The uncompressed size of the chunk. |
+| 4 | crc | uint32 | CRC32 checksum of the preceding fields within the record (not including the record opcode and length prefix). A value of zero indicates that CRC validation should not be performed. |
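In the TypeScript readers in this series, the mapping from this `compression` string to a decompression routine is supplied by the caller as a `DecompressHandlers` object; for example (adapted from `scripts/validate.ts`, using the `wasm-lz4` package):

```
import decompressLZ4 from "wasm-lz4";

import { DecompressHandlers } from "../src/v0/types";

// Keyed by the "compression" string stored in Chunk and Chunk Index records.
// (wasm-lz4 requires awaiting decompressLZ4.isLoaded once before first use.)
const decompressHandlers: DecompressHandlers = {
  lz4: (buffer, uncompressedSize) => decompressLZ4(buffer, Number(uncompressedSize)),
};
```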
 #### Attachment (op=0x08)
 
Attachments can be used to attach artifacts such as calibration data, text, or c
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
-| 4+N | name | String | Name of the attachment, e.g "scene1.jpg". |
+| 4 + N | name | String | Name of the attachment, e.g. "scene1.jpg". |
 | 8 | record_time | Timestamp | Time at which the attachment was recorded. |
-| 4+N | content_type | String | MIME Type (e.g "text/plain"). |
-| 8+N | data | uint64 length-prefixed bytes | Size of the attachment. |
-| 4 | crc | uint32 | CRC of preceding fields in the record. Optionally zero. |
+| 4 + N | content_type | String | MIME Type (e.g. "text/plain"). |
+| 8 + N | data | uint64 length-prefixed bytes | Attachment data. |
+| 4 | crc | uint32 | CRC32 checksum of preceding fields in the record. A value of zero indicates that CRC validation should not be performed. |
 
 #### Attachment Index (op=0x09)
 
The attachment index is an index to named attachments within the file. One recor
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
 | 8 | record_time | Timestamp | Timestamp at which the attachment was recorded. |
-| 8 | attachment_size | uint64 | Size of the attachment. |
+| 8 | data_size | uint64 | Size of the attachment data. |
 | 4 + N | name | String | Name of the attachment. |
 | 4 + N | content_type | String | MIME type of the attachment. |
 | 8 | offset | uint64 | Byte offset to the attachment, from the start of the file. |
@@ -212,10 +216,8 @@ The statistics record contains statistics about the recorded data. It is the las
 | 4 | channel_count | uint32 | Number of channels in the file across all topics. |
 | 4 | attachment_count | uint32 | Number of attachments in the file. |
 | 4 | chunk_count | uint32 | Number of chunks in the file. |
-| N | channel_stats | KeyValues | Array of channel IDs and total message counts for the channel. |
+| N | channel_message_counts | KeyValues | Mapping from channel ID to total message count for the channel. Duplicate keys are not allowed. |
 
 ## Further Reading
 
-Useful links below:
-
 - [Feature explanations][feature explanations]: includes usage details that may be useful to implementers of readers or writers.

From dd77e5b81f773aeefbce3794616697befd3e09b9 Mon Sep 17 00:00:00 2001
From: Jacob Bandes-Storch
Date: Wed, 19 Jan 2022 11:45:08 -0800
Subject: [PATCH 018/635] typescript: v0 reading and unindexed writing (#15)

Breaking changes to package exports and new APIs for reading "v0" mcap
files as defined in the current spec document.

- Renames existing McapReader/Writer to McapPre0Reader/Writer.
- Adds `Mcap0StreamReader` and `Mcap0IndexedReader`
  - Indexed reader does not yet support overlapping or out of order chunks.
- Adds `Mcap0UnindexedWriter` - Adds a translation layer for reading pre-0 files and emitting v0 records (`McapPre0LatestStreamReader`) - Updates `validate` script to try reading v0 files as indexed, and fall back to streamed --- .github/workflows/ci.yml | 1 + .vscode/launch.json | 21 + .vscode/settings.json | 3 +- typescript/jest.config.json | 8 + typescript/package.json | 2 + typescript/scripts/bag2proto.ts | 101 ++-- typescript/scripts/validate.ts | 235 ++++++--- typescript/src/McapWriter.ts | 151 ------ typescript/src/common/BufferedWriter.ts | 88 ++++ typescript/src/common/IWritable.ts | 6 + .../src/{ => common}/StreamBuffer.test.ts | 0 typescript/src/{ => common}/StreamBuffer.ts | 0 typescript/src/common/detectVersion.ts | 23 + typescript/src/common/getBigUint64.ts | 18 + typescript/src/index.ts | 7 +- .../McapPre0Reader.test.ts} | 2 +- .../{McapReader.ts => pre0/McapPre0Reader.ts} | 8 +- .../src/pre0/McapPre0To0StreamReader.ts | 72 +++ typescript/src/pre0/McapPre0Writer.ts | 83 ++++ typescript/src/{ => pre0}/constants.ts | 0 typescript/src/{ => pre0}/parse.ts | 20 +- typescript/src/{ => pre0}/types.ts | 8 +- typescript/src/v0/IReadable.ts | 7 + typescript/src/v0/Mcap0IndexedReader.test.ts | 384 +++++++++++++++ typescript/src/v0/Mcap0IndexedReader.ts | 389 +++++++++++++++ typescript/src/v0/Mcap0RecordWriter.test.ts | 104 ++++ typescript/src/v0/Mcap0RecordWriter.ts | 116 +++++ typescript/src/v0/Mcap0StreamReader.test.ts | 458 ++++++++++++++++++ typescript/src/v0/Mcap0StreamReader.ts | 217 +++++++++ typescript/src/v0/Mcap0UnindexedWriter.ts | 61 +++ typescript/src/v0/Reader.ts | 102 ++++ typescript/src/v0/constants.ts | 21 + typescript/src/v0/index.ts | 4 + typescript/src/v0/parse.ts | 332 +++++++++++++ typescript/src/v0/testUtils.ts | 67 +++ typescript/src/v0/types.ts | 105 ++++ yarn.lock | 5 + 37 files changed, 2939 insertions(+), 290 deletions(-) create mode 100644 .vscode/launch.json delete mode 100644 typescript/src/McapWriter.ts create mode 100644 typescript/src/common/BufferedWriter.ts create mode 100644 typescript/src/common/IWritable.ts rename typescript/src/{ => common}/StreamBuffer.test.ts (100%) rename typescript/src/{ => common}/StreamBuffer.ts (100%) create mode 100644 typescript/src/common/detectVersion.ts create mode 100644 typescript/src/common/getBigUint64.ts rename typescript/src/{McapReader.test.ts => pre0/McapPre0Reader.test.ts} (99%) rename typescript/src/{McapReader.ts => pre0/McapPre0Reader.ts} (96%) create mode 100644 typescript/src/pre0/McapPre0To0StreamReader.ts create mode 100644 typescript/src/pre0/McapPre0Writer.ts rename typescript/src/{ => pre0}/constants.ts (100%) rename typescript/src/{ => pre0}/parse.ts (88%) rename typescript/src/{ => pre0}/types.ts (75%) create mode 100644 typescript/src/v0/IReadable.ts create mode 100644 typescript/src/v0/Mcap0IndexedReader.test.ts create mode 100644 typescript/src/v0/Mcap0IndexedReader.ts create mode 100644 typescript/src/v0/Mcap0RecordWriter.test.ts create mode 100644 typescript/src/v0/Mcap0RecordWriter.ts create mode 100644 typescript/src/v0/Mcap0StreamReader.test.ts create mode 100644 typescript/src/v0/Mcap0StreamReader.ts create mode 100644 typescript/src/v0/Mcap0UnindexedWriter.ts create mode 100644 typescript/src/v0/Reader.ts create mode 100644 typescript/src/v0/constants.ts create mode 100644 typescript/src/v0/index.ts create mode 100644 typescript/src/v0/parse.ts create mode 100644 typescript/src/v0/testUtils.ts create mode 100644 typescript/src/v0/types.ts diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index fe9afed6b1..ddbcc7e34f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,4 +30,5 @@ jobs: - run: yarn install --frozen-lockfile - run: yarn workspace @foxglove/mcap lint:ci + - run: yarn workspace @foxglove/mcap typecheck - run: yarn workspace @foxglove/mcap test diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000000..f715307939 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,21 @@ +{ + "configurations": [ + { + "type": "node", + "name": "vscode-jest-tests", + "request": "launch", + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen", + "disableOptimisticBPs": true, + "cwd": "${workspaceFolder}", + "runtimeExecutable": "yarn", + "args": [ + "workspace", + "@foxglove/mcap", + "test", + "--runInBand", + "--watchAll=false" + ] + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index e36f697419..e214c29483 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -5,5 +5,6 @@ "eslint.packageManager": "yarn", "eslint.options": { "reportUnusedDisableDirectives": true - } + }, + "jest.jestCommandLine": "yarn workspace @foxglove/mcap test" } diff --git a/typescript/jest.config.json b/typescript/jest.config.json index bb60c0087c..c5dcd8fa38 100644 --- a/typescript/jest.config.json +++ b/typescript/jest.config.json @@ -3,6 +3,14 @@ "transform": { "^.+\\.ts$": "ts-jest" }, + "globals": { + "ts-jest": { + "diagnostics": { + "//": "add 6133 (unused variables) to default ignore codes", + "ignoreCodes": [6059, 18002, 18003, 6133] + } + } + }, "//": "Native find is slow because it does not exclude files: https://github.com/facebook/jest/pull/11264#issuecomment-825377579", "haste": { "forceNodeFilesystemAPI": true } } diff --git a/typescript/package.json b/typescript/package.json index e623c03a86..e75c345ca0 100644 --- a/typescript/package.json +++ b/typescript/package.json @@ -21,6 +21,7 @@ ], "scripts": { "prepack": "tsc -b tsconfig.json tsconfig.cjs.json", + "typecheck": "tsc -p tsconfig.json --noEmit", "lint:ci": "eslint --report-unused-disable-directives .", "lint": "eslint --report-unused-disable-directives --fix .", "test": "jest", @@ -59,6 +60,7 @@ }, "dependencies": { "@foxglove/crc": "^0.0.3", + "heap-js": "^2.1.6", "tslib": "^2" } } diff --git a/typescript/scripts/bag2proto.ts b/typescript/scripts/bag2proto.ts index c440b61ee5..636efd7faf 100644 --- a/typescript/scripts/bag2proto.ts +++ b/typescript/scripts/bag2proto.ts @@ -7,13 +7,15 @@ import { Bag } from "@foxglove/rosbag"; import { FileReader } from "@foxglove/rosbag/node"; import { parse as parseMessageDefinition } from "@foxglove/rosmsg"; +import { Time } from "@foxglove/rosmsg-serialization"; import Bzip2 from "@foxglove/wasm-bz2"; import { program } from "commander"; +import { open, FileHandle } from "fs/promises"; import protobufjs from "protobufjs"; import descriptor from "protobufjs/ext/descriptor"; import decompressLZ4 from "wasm-lz4"; -import { McapWriter, ChannelInfo, Message } from "../src"; +import { Mcap0UnindexedWriter, IWritable, ChannelInfo, Message } from "../src/v0"; const builtinSrc = ` syntax = "proto3"; @@ -121,7 +123,7 @@ function rosMsgDefinitionToProto(typeName: string, msgDef: string): protobufjs.R } type TopicDetail = { - channelInfo: ChannelInfo; + channelId: number; MsgRoot: protobufjs.Type; }; @@ -142,6 +144,18 @@ function convertTypedArrays(msg: Record): Record { + await this.handle.write(buffer); + } +} + async function convert(filePath: string) { await 
decompressLZ4.isLoaded; const bzip2 = await Bzip2.init(); @@ -152,8 +166,16 @@ async function convert(filePath: string) { const mcapFilePath = filePath.replace(".bag", ".mcap"); console.debug(`Writing to ${mcapFilePath}`); - const mcapFile = new McapWriter(); - await mcapFile.open(mcapFilePath); + const fileHandle = await open(mcapFilePath, "w"); + const fileHandleWritable = new FileHandleWritable(fileHandle); + + const mcapFile = new Mcap0UnindexedWriter(fileHandleWritable); + + await mcapFile.start({ + profile: "", + library: "mcap typescript bag2proto", + metadata: [["original path", mcapFilePath]], + }); const topicToDetailMap = new Map(); @@ -176,10 +198,8 @@ async function convert(filePath: string) { const descriptorMsgEncoded = descriptor.FileDescriptorSet.encode(descriptorMsg).finish(); - const channelInfo: ChannelInfo = { - type: "ChannelInfo", - id: topicToDetailMap.size, - topic: connection.topic, + const channelInfo: Omit = { + topicName: connection.topic, encoding: "protobuf", schemaName, schema: protobufjs.util.base64.encode( @@ -187,17 +207,18 @@ async function convert(filePath: string) { 0, descriptorMsgEncoded.byteLength, ), - data: new ArrayBuffer(0), + userData: [], }; + const channelId = await mcapFile.registerChannel(channelInfo); + topicToDetailMap.set(connection.topic, { - channelInfo, + channelId, MsgRoot, }); - await mcapFile.write(channelInfo); } - const mcapMessages: Array = []; + const readResults: Array<{ topic: string; message: unknown; timestamp: Time }> = []; await bag.readMessages( { decompress: { @@ -206,37 +227,37 @@ async function convert(filePath: string) { }, }, (result) => { - const detail = topicToDetailMap.get(result.topic); - if (!detail) { - return; - } - - const { channelInfo, MsgRoot } = detail; - try { - const rosMsg = convertTypedArrays(result.message as Record); - const protoMsg = MsgRoot.fromObject(rosMsg); - const protoMsgBuffer = MsgRoot.encode(protoMsg).finish(); - - const timestamp = - BigInt(result.timestamp.sec) * 1000000000n + BigInt(result.timestamp.nsec); - const msg: Message = { - type: "Message", - channelInfo, - timestamp, - data: protoMsgBuffer, - }; - - mcapMessages.push(msg); - } catch (err) { - console.error(err); - console.log(result.message); - throw err; - } + readResults.push(result); }, ); - for (const msg of mcapMessages) { - await mcapFile.write(msg); + for (const result of readResults) { + const detail = topicToDetailMap.get(result.topic); + if (!detail) { + return; + } + + const { channelId, MsgRoot } = detail; + try { + const rosMsg = convertTypedArrays(result.message as Record); + const protoMsg = MsgRoot.fromObject(rosMsg); + const protoMsgBuffer = MsgRoot.encode(protoMsg).finish(); + + const timestamp = BigInt(result.timestamp.sec) * 1000000000n + BigInt(result.timestamp.nsec); + const msg: Message = { + channelId, + sequence: 0, + publishTime: timestamp, + recordTime: timestamp, + messageData: protoMsgBuffer, + }; + + await mcapFile.addMessage(msg); + } catch (err) { + console.error(err); + console.log(result.message); + throw err; + } } await mcapFile.end(); diff --git a/typescript/scripts/validate.ts b/typescript/scripts/validate.ts index 3c230de399..6a443082a2 100644 --- a/typescript/scripts/validate.ts +++ b/typescript/scripts/validate.ts @@ -2,12 +2,25 @@ import { parse as parseMessageDefinition, RosMsgDefinition } from "@foxglove/ros import { LazyMessageReader as ROS1LazyMessageReader } from "@foxglove/rosmsg-serialization"; import { MessageReader as ROS2MessageReader } from 
"@foxglove/rosmsg2-serialization"; import { program } from "commander"; -import fs from "fs"; +import { createReadStream } from "fs"; +import fs from "fs/promises"; import { isEqual } from "lodash"; import { performance } from "perf_hooks"; import decompressLZ4 from "wasm-lz4"; -import { McapReader, McapRecord, ChannelInfo } from "../src"; +import detectVersion, { + DETECT_VERSION_BYTES_REQUIRED, + McapVersion, +} from "../src/common/detectVersion"; +import McapPre0To0StreamReader from "../src/pre0/McapPre0To0StreamReader"; +import Mcap0IndexedReader from "../src/v0/Mcap0IndexedReader"; +import Mcap0StreamReader from "../src/v0/Mcap0StreamReader"; +import { + ChannelInfo, + DecompressHandlers, + McapStreamReader, + TypedMcapRecord, +} from "../src/v0/types"; function log(...data: unknown[]) { console.log(...data); @@ -24,23 +37,67 @@ function formatBytes(totalBytes: number) { return `${bytes.toFixed(2)}${units[unit]!}`; } +async function readStream( + filePath: string, + reader: McapStreamReader, + processRecord: (record: TypedMcapRecord) => void, +) { + const startTime = performance.now(); + let readBytes = 0n; + + await new Promise((resolve, reject) => { + const stream = createReadStream(filePath); + stream.on("data", (data) => { + try { + if (typeof data === "string") { + throw new Error("expected buffer"); + } + readBytes += BigInt(data.byteLength); + reader.append(data); + for (let record; (record = reader.nextRecord()); ) { + processRecord(record); + } + } catch (error) { + reject(error); + stream.close(); + } + }); + stream.on("error", (error) => reject(error)); + stream.on("close", () => resolve()); + }); + + if (!reader.done()) { + throw new Error(`File read incomplete; ${reader.bytesRemaining()} bytes remain after parsing`); + } + + const durationMs = performance.now() - startTime; + log( + `Read ${formatBytes(Number(readBytes))} in ${durationMs.toFixed(2)}ms (${formatBytes( + Number(readBytes) / (durationMs / 1000), + )}/sec)`, + ); +} + async function validate( filePath: string, - { deserialize, dump }: { deserialize: boolean; dump: boolean }, + { deserialize, dump, stream }: { deserialize: boolean; dump: boolean; stream: boolean }, ) { await decompressLZ4.isLoaded; + const decompressHandlers: DecompressHandlers = { + lz4: (buffer, decompressedSize) => decompressLZ4(buffer, Number(decompressedSize)), + }; - const recordCounts = new Map(); + const recordCounts = new Map(); const channelInfoById = new Map< number, { info: ChannelInfo; - messageDeserializer: ROS2MessageReader | ROS1LazyMessageReader; - parsedDefinitions: RosMsgDefinition[]; + messageDeserializer?: ROS2MessageReader | ROS1LazyMessageReader; + parsedDefinitions?: RosMsgDefinition[]; } >(); - function processRecord(record: McapRecord) { + function processRecord(record: TypedMcapRecord) { recordCounts.set(record.type, (recordCounts.get(record.type) ?? 
0) + 1); switch (record.type) { @@ -48,10 +105,10 @@ async function validate( break; case "ChannelInfo": { - const existingInfo = channelInfoById.get(record.id); + const existingInfo = channelInfoById.get(record.channelId); if (existingInfo) { if (!isEqual(existingInfo.info, record)) { - throw new Error(`differing channel infos for ${record.id}`); + throw new Error(`differing channel infos for ${record.channelId}`); } break; } @@ -65,10 +122,13 @@ async function validate( ros2: true, }); messageDeserializer = new ROS2MessageReader(parsedDefinitions); + } else if (record.encoding === "protobuf") { + messageDeserializer = undefined; + parsedDefinitions = undefined; } else { throw new Error(`unsupported encoding ${record.encoding}`); } - channelInfoById.set(record.id, { + channelInfoById.set(record.channelId, { info: record, messageDeserializer, parsedDefinitions, @@ -77,26 +137,27 @@ async function validate( } case "Message": { - const channelInfo = channelInfoById.get(record.channelInfo.id); + const channelInfo = channelInfoById.get(record.channelId); if (!channelInfo) { - throw new Error( - `message for channel ${record.channelInfo.id} with no prior channel info`, - ); + throw new Error(`message for channel ${record.channelId} with no prior channel info`); } if (deserialize) { let message: unknown; if (channelInfo.messageDeserializer instanceof ROS1LazyMessageReader) { - const size = channelInfo.messageDeserializer.size(new DataView(record.data)); - if (size !== record.data.byteLength) { + const size = channelInfo.messageDeserializer.size(record.messageData); + if (size !== record.messageData.byteLength) { throw new Error( - `Message size ${size} should match buffer length ${record.data.byteLength}`, + `Message size ${size} should match buffer length ${record.messageData.byteLength}`, ); } - message = channelInfo.messageDeserializer - .readMessage(new DataView(record.data)) - .toJSON(); + message = channelInfo.messageDeserializer.readMessage(record.messageData).toJSON(); } else { - message = channelInfo.messageDeserializer.readMessage(new DataView(record.data)); + if (channelInfo.messageDeserializer == undefined) { + throw new Error( + `No deserializer available for channel id: ${channelInfo.info.channelId} ${channelInfo.info.encoding}`, + ); + } + message = channelInfo.messageDeserializer.readMessage(record.messageData); } if (dump) { log(message); @@ -108,46 +169,95 @@ async function validate( } log("Reading", filePath); - const startTime = performance.now(); - let readBytes = 0n; - const reader = new McapReader({ - includeChunks: true, - decompressHandlers: { - lz4: (buffer, decompressedSize) => decompressLZ4(buffer, Number(decompressedSize)), - }, - }); - await new Promise((resolve, reject) => { - const stream = fs.createReadStream(filePath); - stream.on("data", (data) => { - try { - if (typeof data === "string") { - throw new Error("expected buffer"); - } - readBytes += BigInt(data.byteLength); - reader.append(data); - for (let record; (record = reader.nextRecord()); ) { - processRecord(record); - } - } catch (error) { - reject(error); - stream.close(); + let mcapVersion: McapVersion | undefined; + { + const handle = await fs.open(filePath, "r"); + try { + const buffer = new Uint8Array(DETECT_VERSION_BYTES_REQUIRED); + const readResult = await handle.read({ + buffer, + offset: 0, + length: DETECT_VERSION_BYTES_REQUIRED, + }); + mcapVersion = detectVersion(new DataView(buffer.buffer, 0, readResult.bytesRead)); + if (mcapVersion == undefined) { + throw new Error( + `Not a valid MCAP 
file: unable to detect version with file header ${Array.from(buffer) + .map((val) => val.toString(16).padStart(2, "0")) + .join(" ")}`, + ); } - }); - stream.on("error", (error) => reject(error)); - stream.on("close", () => resolve()); - }); + log("Detected MCAP version:", mcapVersion); + } finally { + await handle.close(); + } + } - if (!reader.done()) { - throw new Error(`File read incomplete; ${reader.bytesRemaining()} bytes remain after parsing`); + switch (mcapVersion) { + case "pre0": + await readStream( + filePath, + new McapPre0To0StreamReader({ includeChunks: true, decompressHandlers }), + processRecord, + ); + break; + + case "0": + if (!stream) { + const handle = await fs.open(filePath, "r"); + try { + let buffer = new ArrayBuffer(4096); + const reader = await Mcap0IndexedReader.Initialize({ + readable: { + size: async () => BigInt((await handle.stat()).size), + read: async (offset, length) => { + if (offset > Number.MAX_SAFE_INTEGER || length > Number.MAX_SAFE_INTEGER) { + throw new Error(`Read too large: offset ${offset}, length ${length}`); + } + if (length > buffer.byteLength) { + buffer = new ArrayBuffer(Number(length * 2n)); + } + const result = await handle.read({ + buffer: new DataView(buffer, 0, Number(length)), + position: Number(offset), + }); + if (result.bytesRead !== Number(length)) { + throw new Error( + `Read only ${result.bytesRead} bytes from offset ${offset}, expected ${length}`, + ); + } + return new Uint8Array( + result.buffer.buffer, + result.buffer.byteOffset, + result.bytesRead, + ); + }, + }, + decompressHandlers, + }); + for await (const message of reader.readMessages()) { + processRecord(message); + } + break; + } catch (error) { + log( + "Unable to read file as indexed; falling back to streaming:", + (error as Error).message, + error, + ); + } finally { + await handle.close(); + } + } + await readStream( + filePath, + new Mcap0StreamReader({ includeChunks: true, decompressHandlers, validateCrcs: true }), + processRecord, + ); + break; } - const durationMs = performance.now() - startTime; - log( - `Read ${formatBytes(Number(readBytes))} in ${durationMs.toFixed(2)}ms (${formatBytes( - Number(readBytes) / (durationMs / 1000), - )}/sec)`, - ); log("Record counts:"); for (const [type, count] of recordCounts) { log(` ${count.toFixed().padStart(6, " ")} ${type}`); @@ -158,9 +268,12 @@ program .argument("", "path to mcap file(s)") .option("--deserialize", "deserialize message contents", false) .option("--dump", "dump message contents to stdout", false) - .action(async (files: string[], options: { deserialize: boolean; dump: boolean }) => { - for (const file of files) { - await validate(file, options).catch(console.error); - } - }) + .option("--stream", "if a file is indexed, ignore the index and read it as a stream", false) + .action( + async (files: string[], options: { deserialize: boolean; dump: boolean; stream: boolean }) => { + for (const file of files) { + await validate(file, options).catch(console.error); + } + }, + ) .parse(); diff --git a/typescript/src/McapWriter.ts b/typescript/src/McapWriter.ts deleted file mode 100644 index c201bb17a4..0000000000 --- a/typescript/src/McapWriter.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { open, FileHandle } from "fs/promises"; - -import { MCAP_MAGIC, RecordType } from "./constants"; -import { ChannelInfo, McapRecord, Message } from "./types"; - -const LITTLE_ENDIAN = true; - -class Writer { - buffer: ArrayBuffer; - private view: DataView; - private offset = 0; - private textEncoder = new TextEncoder(); - - 
constructor(scratchBuffer?: ArrayBuffer) { - this.buffer = scratchBuffer ?? new ArrayBuffer(4096); - this.view = new DataView(this.buffer); - } - - size(): number { - return this.offset; - } - - ensureCapacity(capacity: number): void { - if (this.offset + capacity >= this.buffer.byteLength) { - const newBuffer = new ArrayBuffer(this.buffer.byteLength * 2); - new Uint8Array(newBuffer).set(new Uint8Array(this.buffer)); - this.buffer = newBuffer; - this.view = new DataView(newBuffer); - } - } - int8(value: number): void { - this.ensureCapacity(1); - this.view.setInt8(this.offset, value); - this.offset += 1; - } - uint8(value: number): void { - this.ensureCapacity(1); - this.view.setUint8(this.offset, value); - this.offset += 1; - } - int16(value: number): void { - this.ensureCapacity(2); - this.view.setInt16(this.offset, value, LITTLE_ENDIAN); - this.offset += 2; - } - uint16(value: number): void { - this.ensureCapacity(2); - this.view.setUint16(this.offset, value, LITTLE_ENDIAN); - this.offset += 2; - } - int32(value: number): void { - this.ensureCapacity(4); - this.view.setInt32(this.offset, value, LITTLE_ENDIAN); - this.offset += 4; - } - uint32(value: number): void { - this.ensureCapacity(4); - this.view.setUint32(this.offset, value, LITTLE_ENDIAN); - this.offset += 4; - } - int64(value: bigint): void { - this.ensureCapacity(8); - this.view.setBigInt64(this.offset, value, LITTLE_ENDIAN); - this.offset += 8; - } - uint64(value: bigint): void { - this.ensureCapacity(8); - this.view.setBigUint64(this.offset, value, LITTLE_ENDIAN); - this.offset += 8; - } - string(value: string): void { - this.uint32(value.length); - const stringBytes = this.textEncoder.encode(value); - this.ensureCapacity(stringBytes.byteLength); - new Uint8Array(this.buffer, this.offset, stringBytes.byteLength).set(stringBytes); - this.offset += stringBytes.length; - } - - toUint8(): Uint8Array { - return new Uint8Array(this.buffer, 0, this.size()); - } -} - -export default class McapWriter { - private writeStream?: FileHandle; - - async open(pathname: string): Promise { - this.writeStream = await open(pathname, "w"); - - // write the magic - // 0x89, M, C, A, P, \r, \n, \n - await this.writeStream.write(new Uint8Array(MCAP_MAGIC)); - - // write the format version - await this.writeStream.write(new Uint8Array([1])); - } - - async write(record: McapRecord): Promise { - switch (record.type) { - case "ChannelInfo": - await this.writeChannelInfoRecord(record); - break; - case "Message": - await this.writeMessageRecord(record); - break; - default: - throw new Error(`Unsupported record type: ${record.type}`); - } - } - - async end(): Promise { - // write the footer - const serializer = new Writer(); - serializer.uint8(RecordType.FOOTER); - serializer.uint64(0n); - serializer.uint32(0); - await this.writeStream?.write(serializer.toUint8()); - - await this.writeStream?.close(); - } - - private async writeChannelInfoRecord(info: ChannelInfo): Promise { - const serializer = new Writer(); - serializer.uint32(info.id); - serializer.string(info.topic); - serializer.string(info.encoding); - serializer.string(info.schemaName); - serializer.string(info.schema); - - const preamble = new Writer(); - preamble.uint8(RecordType.CHANNEL_INFO); - preamble.uint32(serializer.size()); - - await this.writeStream?.write(preamble.toUint8()); - await this.writeStream?.write(serializer.toUint8()); - } - - private async writeMessageRecord(message: Message): Promise { - const serializer = new Writer(); - serializer.uint32(message.channelInfo.id); - 
serializer.uint64(message.timestamp); - - const preamble = new Writer(); - preamble.uint8(RecordType.MESSAGE); - preamble.uint32(serializer.size() + message.data.byteLength); - - await this.writeStream?.write(preamble.toUint8()); - await this.writeStream?.write(serializer.toUint8()); - await this.writeStream?.write(new Uint8Array(message.data)); - } -} diff --git a/typescript/src/common/BufferedWriter.ts b/typescript/src/common/BufferedWriter.ts new file mode 100644 index 0000000000..bee9b709f1 --- /dev/null +++ b/typescript/src/common/BufferedWriter.ts @@ -0,0 +1,88 @@ +import { IWritable } from "../v0"; + +const LITTLE_ENDIAN = true; + +export class BufferedWriter { + private buffer = new Uint8Array(4096); + private view: DataView; + private textEncoder = new TextEncoder(); + private offset = 0; + + constructor() { + this.view = new DataView(this.buffer.buffer); + } + + get length(): number { + return this.offset; + } + + ensureCapacity(capacity: number): void { + if (this.offset + capacity >= this.buffer.byteLength) { + const newBuffer = new Uint8Array(this.buffer.byteLength * 2); + newBuffer.set(this.buffer); + + this.buffer = newBuffer; + this.view = new DataView(this.buffer.buffer); + } + } + + int8(value: number): void { + this.ensureCapacity(1); + this.view.setInt8(this.offset, value); + this.offset += 1; + } + uint8(value: number): void { + this.ensureCapacity(1); + this.view.setUint8(this.offset, value); + this.offset += 1; + } + int16(value: number): void { + this.ensureCapacity(2); + this.view.setInt16(this.offset, value, LITTLE_ENDIAN); + this.offset += 2; + } + uint16(value: number): void { + this.ensureCapacity(2); + this.view.setUint16(this.offset, value, LITTLE_ENDIAN); + this.offset += 2; + } + int32(value: number): void { + this.ensureCapacity(4); + this.view.setInt32(this.offset, value, LITTLE_ENDIAN); + this.offset += 4; + } + uint32(value: number): void { + this.ensureCapacity(4); + this.view.setUint32(this.offset, value, LITTLE_ENDIAN); + this.offset += 4; + } + int64(value: bigint): void { + this.ensureCapacity(8); + this.view.setBigInt64(this.offset, value, LITTLE_ENDIAN); + this.offset += 8; + } + uint64(value: bigint): void { + this.ensureCapacity(8); + this.view.setBigUint64(this.offset, value, LITTLE_ENDIAN); + this.offset += 8; + } + string(value: string): void { + const stringBytes = this.textEncoder.encode(value); + this.ensureCapacity(stringBytes.byteLength + 4); + this.uint32(value.length); + this.buffer.set(stringBytes, this.offset); + this.offset += stringBytes.length; + } + + async flush(writable: IWritable): Promise { + if (this.offset === 0) { + return; + } + + try { + await writable.write(this.buffer.slice(0, this.offset)); + } finally { + this.offset = 0; + } + } +} diff --git a/typescript/src/common/IWritable.ts b/typescript/src/common/IWritable.ts new file mode 100644 index 0000000000..6b2d656e8c --- /dev/null +++ b/typescript/src/common/IWritable.ts @@ -0,0 +1,6 @@ +/** + * IWritable describes a writer interface. 
+ */ +export interface IWritable { + write(buffer: Uint8Array): Promise; +} diff --git a/typescript/src/StreamBuffer.test.ts b/typescript/src/common/StreamBuffer.test.ts similarity index 100% rename from typescript/src/StreamBuffer.test.ts rename to typescript/src/common/StreamBuffer.test.ts diff --git a/typescript/src/StreamBuffer.ts b/typescript/src/common/StreamBuffer.ts similarity index 100% rename from typescript/src/StreamBuffer.ts rename to typescript/src/common/StreamBuffer.ts diff --git a/typescript/src/common/detectVersion.ts b/typescript/src/common/detectVersion.ts new file mode 100644 index 0000000000..a8b4244868 --- /dev/null +++ b/typescript/src/common/detectVersion.ts @@ -0,0 +1,23 @@ +import { MCAP_MAGIC } from "../pre0/constants"; +import { MCAP0_MAGIC } from "../v0/constants"; + +export type McapVersion = "pre0" | "0"; + +export const DETECT_VERSION_BYTES_REQUIRED = 8; + +/** + * Detect MCAP version from file prefix. At least `DETECT_VERSION_BYTES_REQUIRED` bytes must be + * provided for the version to be detectable. + */ +export default function detectVersion(prefix: DataView): McapVersion | undefined { + if (prefix.byteLength < DETECT_VERSION_BYTES_REQUIRED) { + return undefined; + } + if (MCAP_MAGIC.every((val, i) => val === prefix.getUint8(i))) { + return "pre0"; + } + if (MCAP0_MAGIC.every((val, i) => val === prefix.getUint8(i))) { + return "0"; + } + return undefined; +} diff --git a/typescript/src/common/getBigUint64.ts b/typescript/src/common/getBigUint64.ts new file mode 100644 index 0000000000..02b275f259 --- /dev/null +++ b/typescript/src/common/getBigUint64.ts @@ -0,0 +1,18 @@ +// DataView.getBigUint64 was added to relatively recent versions of Safari. It's pretty easy to +// maintain this fallback code. +// +// eslint-disable-next-line @foxglove/no-boolean-parameters +export const getBigUint64: (this: DataView, offset: number, littleEndian?: boolean) => bigint = + typeof DataView.prototype.getBigUint64 === "function" + ? DataView.prototype.getBigUint64 // eslint-disable-line @typescript-eslint/unbound-method + : function (this: DataView, offset, littleEndian): bigint { + const lo = + littleEndian === true + ? this.getUint32(offset, littleEndian) + : this.getUint32(offset + 4, littleEndian); + const hi = + littleEndian === true + ? 
this.getUint32(offset + 4, littleEndian) + : this.getUint32(offset, littleEndian); + return (BigInt(hi) << 32n) | BigInt(lo); + }; diff --git a/typescript/src/index.ts b/typescript/src/index.ts index 94e6da95a4..68879c2d0d 100644 --- a/typescript/src/index.ts +++ b/typescript/src/index.ts @@ -1,4 +1,3 @@ -export { default as McapReader } from "./McapReader"; -export * from "./parse"; -export * from "./types"; -export { default as McapWriter } from "./McapWriter"; +export { default as McapPre0Reader } from "./pre0/McapPre0Reader"; +export * as Pre0Types from "./pre0/types"; +export { default as McapPre0Writer } from "./pre0/McapPre0Writer"; diff --git a/typescript/src/McapReader.test.ts b/typescript/src/pre0/McapPre0Reader.test.ts similarity index 99% rename from typescript/src/McapReader.test.ts rename to typescript/src/pre0/McapPre0Reader.test.ts index 53a76113ef..fd5344dc50 100644 --- a/typescript/src/McapReader.test.ts +++ b/typescript/src/pre0/McapPre0Reader.test.ts @@ -1,6 +1,6 @@ import { crc32 } from "@foxglove/crc"; -import McapReader from "./McapReader"; +import McapReader from "./McapPre0Reader"; import { MCAP_MAGIC, RecordType } from "./constants"; function uint32LE(n: number): Uint8Array { diff --git a/typescript/src/McapReader.ts b/typescript/src/pre0/McapPre0Reader.ts similarity index 96% rename from typescript/src/McapReader.ts rename to typescript/src/pre0/McapPre0Reader.ts index 1d84552f2a..d37249e574 100644 --- a/typescript/src/McapReader.ts +++ b/typescript/src/pre0/McapPre0Reader.ts @@ -1,6 +1,6 @@ import { crc32 } from "@foxglove/crc"; -import StreamBuffer from "./StreamBuffer"; +import StreamBuffer from "../common/StreamBuffer"; import { MCAP_MAGIC } from "./constants"; import { parseMagic, parseRecord } from "./parse"; import { ChannelInfo, McapRecord } from "./types"; @@ -44,7 +44,7 @@ type McapReaderOptions = { * }); * ``` */ -export default class McapReader { +export default class McapPre0Reader { private buffer = new StreamBuffer(MCAP_MAGIC.length * 2); private decompressHandlers; private includeChunks; @@ -132,8 +132,6 @@ export default class McapReader { switch (record.type) { case "ChannelInfo": case "Message": - case "IndexData": - case "ChunkInfo": yield record; break; @@ -172,8 +170,6 @@ export default class McapReader { ) { switch (chunkResult.record.type) { case "Chunk": - case "IndexData": - case "ChunkInfo": case "Footer": throw new Error(`${chunkResult.record.type} record not allowed inside a chunk`); case "ChannelInfo": diff --git a/typescript/src/pre0/McapPre0To0StreamReader.ts b/typescript/src/pre0/McapPre0To0StreamReader.ts new file mode 100644 index 0000000000..8e48ef290f --- /dev/null +++ b/typescript/src/pre0/McapPre0To0StreamReader.ts @@ -0,0 +1,72 @@ +import { McapStreamReader, TypedMcapRecord } from "../v0/types"; +import McapPre0Reader from "./McapPre0Reader"; +import { McapRecord as McapPre0Record } from "./types"; + +function translateRecord(record: McapPre0Record): TypedMcapRecord { + switch (record.type) { + case "ChannelInfo": + return { + type: "ChannelInfo", + channelId: record.id, + topicName: record.topic, + encoding: record.encoding, + schemaName: record.schemaName, + schema: record.schema, + userData: [], + }; + case "Message": + return { + type: "Message", + channelId: record.channelInfo.id, + sequence: 0, + publishTime: record.timestamp, + recordTime: record.timestamp, + messageData: new Uint8Array(record.data), + }; + case "Chunk": + return { + type: "Chunk", + uncompressedSize: record.decompressedSize, + uncompressedCrc: 
record.decompressedCrc, + compression: record.compression, + records: new Uint8Array(record.data), + }; + case "Footer": + return { + type: "Footer", + indexOffset: 0n, + indexCrc: 0, + }; + } +} + +/** + * Stream reader which translates pre0 records to the v0 record format. + */ +export default class McapPre0To0StreamReader implements McapStreamReader { + private reader: McapPre0Reader; + + constructor(...params: ConstructorParameters) { + this.reader = new McapPre0Reader(...params); + } + + done(): boolean { + return this.reader.done(); + } + + bytesRemaining(): number { + return this.reader.bytesRemaining(); + } + + append(data: Uint8Array): void { + this.reader.append(data); + } + + nextRecord(): TypedMcapRecord | undefined { + const record = this.reader.nextRecord(); + if (!record) { + return undefined; + } + return translateRecord(record); + } +} diff --git a/typescript/src/pre0/McapPre0Writer.ts b/typescript/src/pre0/McapPre0Writer.ts new file mode 100644 index 0000000000..329c5fbf46 --- /dev/null +++ b/typescript/src/pre0/McapPre0Writer.ts @@ -0,0 +1,83 @@ +import { open, FileHandle } from "fs/promises"; + +import { BufferedWriter } from "../common/BufferedWriter"; +import { MCAP_MAGIC, RecordType } from "./constants"; +import { ChannelInfo, McapRecord, Message } from "./types"; + +export default class McapPre0Writer { + private writeStream?: FileHandle; + + async open(pathname: string): Promise { + this.writeStream = await open(pathname, "w"); + + // write the magic + // 0x89, M, C, A, P, \r, \n, \n + await this.writeStream.write(new Uint8Array(MCAP_MAGIC)); + + // write the format version + await this.writeStream.write(new Uint8Array([1])); + } + + async write(record: McapRecord): Promise { + switch (record.type) { + case "ChannelInfo": + await this.writeChannelInfoRecord(record); + break; + case "Message": + await this.writeMessageRecord(record); + break; + default: + throw new Error(`Unsupported record type: ${record.type}`); + } + } + + async end(): Promise { + if (!this.writeStream) { + return; + } + // write the footer + const serializer = new BufferedWriter(); + serializer.uint8(RecordType.FOOTER); + serializer.uint64(0n); + serializer.uint32(0); + await serializer.flush(this.writeStream); + + await this.writeStream?.close(); + } + + private async writeChannelInfoRecord(info: ChannelInfo): Promise { + if (!this.writeStream) { + return; + } + const serializer = new BufferedWriter(); + serializer.uint32(info.id); + serializer.string(info.topic); + serializer.string(info.encoding); + serializer.string(info.schemaName); + serializer.string(info.schema); + + const preamble = new BufferedWriter(); + preamble.uint8(RecordType.CHANNEL_INFO); + preamble.uint32(serializer.length); + + await preamble.flush(this.writeStream); + await serializer.flush(this.writeStream); + } + + private async writeMessageRecord(message: Message): Promise { + if (!this.writeStream) { + return; + } + const serializer = new BufferedWriter(); + serializer.uint32(message.channelInfo.id); + serializer.uint64(message.timestamp); + + const preamble = new BufferedWriter(); + preamble.uint8(RecordType.MESSAGE); + preamble.uint32(serializer.length + message.data.byteLength); + + await preamble.flush(this.writeStream); + await serializer.flush(this.writeStream); + await this.writeStream?.write(new Uint8Array(message.data)); + } +} diff --git a/typescript/src/constants.ts b/typescript/src/pre0/constants.ts similarity index 100% rename from typescript/src/constants.ts rename to typescript/src/pre0/constants.ts diff 
--git a/typescript/src/parse.ts b/typescript/src/pre0/parse.ts similarity index 88% rename from typescript/src/parse.ts rename to typescript/src/pre0/parse.ts index db26cc75ca..58c666151d 100644 --- a/typescript/src/parse.ts +++ b/typescript/src/pre0/parse.ts @@ -1,27 +1,9 @@ import { isEqual } from "lodash"; +import { getBigUint64 } from "../common/getBigUint64"; import { MCAP_MAGIC, RecordType } from "./constants"; import { McapMagic, McapRecord, ChannelInfo } from "./types"; -// DataView.getBigUint64 was added to relatively recent versions of Safari. It's pretty easy to -// maintain this fallback code. -// -// eslint-disable-next-line @foxglove/no-boolean-parameters -const getBigUint64: (this: DataView, offset: number, littleEndian?: boolean) => bigint = - typeof DataView.prototype.getBigUint64 === "function" - ? DataView.prototype.getBigUint64 // eslint-disable-line @typescript-eslint/unbound-method - : function (this: DataView, offset, littleEndian): bigint { - const lo = - littleEndian === true - ? this.getUint32(offset, littleEndian) - : this.getUint32(offset + 4, littleEndian); - const hi = - littleEndian === true - ? this.getUint32(offset + 4, littleEndian) - : this.getUint32(offset, littleEndian); - return (BigInt(hi) << 32n) | BigInt(lo); - }; - /** * Parse a MCAP magic string and format version at `startOffset` in `view`. */ diff --git a/typescript/src/types.ts b/typescript/src/pre0/types.ts similarity index 75% rename from typescript/src/types.ts rename to typescript/src/pre0/types.ts index 1c79d18a63..1cff53b120 100644 --- a/typescript/src/types.ts +++ b/typescript/src/pre0/types.ts @@ -24,16 +24,10 @@ export type Chunk = { decompressedCrc: number; data: ArrayBuffer; }; -export type IndexData = { - type: "IndexData"; -}; -export type ChunkInfo = { - type: "ChunkInfo"; -}; export type Footer = { type: "Footer"; indexPos: bigint; indexCrc: number; }; -export type McapRecord = ChannelInfo | Message | Chunk | IndexData | ChunkInfo | Footer; +export type McapRecord = ChannelInfo | Message | Chunk | Footer; diff --git a/typescript/src/v0/IReadable.ts b/typescript/src/v0/IReadable.ts new file mode 100644 index 0000000000..5975f14148 --- /dev/null +++ b/typescript/src/v0/IReadable.ts @@ -0,0 +1,7 @@ +/** + * IReadable describes a random-access reader interface. 
+ */
+export interface IReadable {
+  size(): Promise<bigint>;
+  read(offset: bigint, size: bigint): Promise<Uint8Array>;
+}
diff --git a/typescript/src/v0/Mcap0IndexedReader.test.ts b/typescript/src/v0/Mcap0IndexedReader.test.ts
new file mode 100644
index 0000000000..fe05fed8c7
--- /dev/null
+++ b/typescript/src/v0/Mcap0IndexedReader.test.ts
@@ -0,0 +1,384 @@
+import { crc32 } from "@foxglove/crc";
+
+import Mcap0IndexedReader from "./Mcap0IndexedReader";
+import { MCAP0_MAGIC, Opcode } from "./constants";
+import {
+  record,
+  uint64LE,
+  uint32LE,
+  string,
+  keyValues,
+  collect,
+  crcSuffix,
+  uint16LE,
+} from "./testUtils";
+import { TypedMcapRecords } from "./types";
+
+function makeReadable(data: Uint8Array) {
+  let readCalls = 0;
+  return {
+    get readCalls() {
+      return readCalls;
+    },
+    size: async () => BigInt(data.length),
+    read: async (offset: bigint, size: bigint) => {
+      ++readCalls;
+      if (offset > Number.MAX_SAFE_INTEGER || size > Number.MAX_SAFE_INTEGER) {
+        throw new Error(`Read too large: offset ${offset}, size ${size}`);
+      }
+      if (offset < 0 || size < 0 || offset + size > data.length) {
+        throw new Error(
+          `Read out of range: offset ${offset}, size ${size} (data.length: ${data.length})`,
+        );
+      }
+      return data.slice(Number(offset), Number(offset + size));
+    },
+  };
+}
+
+describe("Mcap0IndexedReader", () => {
+  it("rejects files that are too small", async () => {
+    await expect(
+      Mcap0IndexedReader.Initialize({
+        readable: makeReadable(
+          new Uint8Array([
+            ...MCAP0_MAGIC,
+            ...record(Opcode.FOOTER, [
+              ...uint64LE(0n), // index offset
+              ...uint32LE(0), // index crc
+            ]),
+            ...MCAP0_MAGIC,
+          ]),
+        ),
+      }),
+    ).rejects.toThrow("too small to be valid MCAP");
+
+    await expect(
+      Mcap0IndexedReader.Initialize({
+        readable: makeReadable(
+          new Uint8Array([
+            ...MCAP0_MAGIC,
+            ...record(Opcode.HEADER, [
+              ...string(""), // profile
+              ...string(""), // library
+              ...keyValues(string, string, []), // metadata
+            ]),
+            ...MCAP0_MAGIC,
+          ]),
+        ),
+      }),
+    ).rejects.toThrow("too small to be valid MCAP");
+  });
+
+  it("rejects unindexed file", async () => {
+    const readable = makeReadable(
+      new Uint8Array([
+        ...MCAP0_MAGIC,
+        ...record(Opcode.HEADER, [
+          ...string(""), // profile
+          ...string(""), // library
+          ...keyValues(string, string, []), // metadata
+        ]),
+        ...record(Opcode.FOOTER, [
+          ...uint64LE(0n), // index offset
+          ...uint32LE(0), // index crc
+        ]),
+        ...MCAP0_MAGIC,
+      ]),
+    );
+    await expect(Mcap0IndexedReader.Initialize({ readable })).rejects.toThrow(
+      "File is not indexed",
+    );
+  });
+
+  it("parses file with empty index", async () => {
+    const data = [
+      ...MCAP0_MAGIC,
+      ...record(Opcode.HEADER, [
+        ...string(""), // profile
+        ...string(""), // library
+        ...keyValues(string, string, []), // metadata
+      ]),
+    ];
+    const indexOffset = data.length;
+    data.push(
+      ...record(Opcode.FOOTER, [
+        ...uint64LE(BigInt(indexOffset)), // index offset
+        ...uint32LE(
+          crc32(
+            new Uint8Array([
+              Opcode.FOOTER,
+              ...uint64LE(/*index offset*/ 8n + /*index crc*/ 4n), // record length
+              ...uint64LE(BigInt(indexOffset)), // index offset
+            ]),
+          ),
+        ), // index crc
+      ]),
+      ...MCAP0_MAGIC,
+    );
+    const readable = makeReadable(new Uint8Array(data));
+    const reader = await Mcap0IndexedReader.Initialize({ readable });
+    await expect(collect(reader.readMessages())).resolves.toEqual([]);
+    expect(readable.readCalls).toBe(2);
+  });
+
+  it("rejects invalid index crc", async () => {
+    const data = [
+      ...MCAP0_MAGIC,
+      ...record(Opcode.HEADER, [
+        ...string(""), // profile
+        ...string(""), // library
+
...keyValues(string, string, []), // metadata + ]), + ]; + const indexOffset = data.length; + data.push( + ...record(Opcode.FOOTER, [ + ...uint64LE(BigInt(indexOffset)), // index offset + ...uint32LE(crc32(new Uint8Array([42]))), // index crc + ]), + ...MCAP0_MAGIC, + ); + const readable = makeReadable(new Uint8Array(data)); + await expect(Mcap0IndexedReader.Initialize({ readable })).rejects.toThrow( + "Incorrect index CRC 1565496904 (expected 163128923)", + ); + }); + + it("parses index with channel info", async () => { + const data = [ + ...MCAP0_MAGIC, + ...record(Opcode.HEADER, [ + ...string(""), // profile + ...string(""), // library + ...keyValues(string, string, []), // metadata + ]), + ]; + const indexOffset = data.length; + data.push( + ...record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ), + ...record(Opcode.FOOTER, [ + ...uint64LE(BigInt(indexOffset)), // index offset + ...uint32LE(crc32(new Uint8Array(0))), // index crc + ]), + ...MCAP0_MAGIC, + ); + const readable = makeReadable(new Uint8Array(data)); + const reader = await Mcap0IndexedReader.Initialize({ readable }); + await expect(collect(reader.readMessages())).resolves.toEqual([]); + expect(reader.channelInfosById).toEqual( + new Map([ + [ + 42, + { + type: "ChannelInfo", + channelId: 42, + topicName: "mytopic", + encoding: "utf12", + schemaName: "some data", + schema: "stuff", + userData: [["foo", "bar"]], + }, + ], + ]), + ); + expect(readable.readCalls).toBe(2); + }); + + describe("indexed with single channel", () => { + const message1: TypedMcapRecords["Message"] = { + type: "Message", + channelId: 42, + sequence: 1, + publishTime: 0n, + recordTime: 10n, + messageData: new Uint8Array(), + }; + const message2: TypedMcapRecords["Message"] = { + type: "Message", + channelId: 42, + sequence: 2, + publishTime: 1n, + recordTime: 11n, + messageData: new Uint8Array(), + }; + const message3: TypedMcapRecords["Message"] = { + type: "Message", + channelId: 42, + sequence: 3, + publishTime: 2n, + recordTime: 12n, + messageData: new Uint8Array(), + }; + it.each([ + { startTime: undefined, endTime: undefined, expected: [message1, message2, message3] }, + { startTime: 11n, endTime: 11n, expected: [message2] }, + { startTime: 11n, endTime: undefined, expected: [message2, message3] }, + { startTime: undefined, endTime: 11n, expected: [message1, message2] }, + { startTime: 10n, endTime: 12n, expected: [message1, message2, message3] }, + ])( + "fetches chunk data and reads requested messages between $startTime and $endTime", + async ({ startTime, endTime, expected }) => { + const channelInfo = record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ); + const message1Data = record(Opcode.MESSAGE, [ + ...uint16LE(message1.channelId), // channel id + ...uint32LE(message1.sequence), // sequence + ...uint64LE(message1.publishTime), // publish time + ...uint64LE(message1.recordTime), // record time + ]); + const message2Data = record(Opcode.MESSAGE, [ + ...uint16LE(message2.channelId), // channel id + ...uint32LE(message2.sequence), // sequence + 
...uint64LE(message2.publishTime), // publish time + ...uint64LE(message2.recordTime), // record time + ]); + const message3Data = record(Opcode.MESSAGE, [ + ...uint16LE(message3.channelId), // channel id + ...uint32LE(message3.sequence), // sequence + ...uint64LE(message3.publishTime), // publish time + ...uint64LE(message3.recordTime), // record time + ]); + const chunkContents = [...channelInfo]; + const message1Offset = BigInt(chunkContents.length); + chunkContents.push(...message1Data); + const message2Offset = BigInt(chunkContents.length); + chunkContents.push(...message2Data); + const message3Offset = BigInt(chunkContents.length); + chunkContents.push(...message3Data); + + const data = [ + ...MCAP0_MAGIC, + ...record(Opcode.HEADER, [ + ...string(""), // profile + ...string(""), // library + ...keyValues(string, string, []), // metadata + ]), + ]; + const chunkOffset = BigInt(data.length); + data.push( + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(new Uint8Array(chunkContents))), // decompressed crc32 + ...string(""), // compression + ...chunkContents, + ]), + ); + const messageIndexOffset = BigInt(data.length); + data.push( + ...record( + Opcode.MESSAGE_INDEX, + crcSuffix([ + ...uint16LE(42), // channel id + ...uint32LE(1), // count + ...keyValues(uint64LE, uint64LE, [ + [message1.recordTime, message1Offset], + [message2.recordTime, message2Offset], + [message3.recordTime, message3Offset], + ]), // records + ]), + ), + ); + const messageIndexLength = BigInt(data.length) - messageIndexOffset; + const indexOffset = data.length; + data.push( + ...channelInfo, + ...record( + Opcode.CHUNK_INDEX, + crcSuffix([ + ...uint64LE(message1.recordTime), // start time + ...uint64LE(message3.recordTime), // end time + ...uint64LE(chunkOffset), // offset + ...keyValues(uint16LE, uint64LE, [[42, messageIndexOffset]]), // message index offsets + ...uint64LE(messageIndexLength), // message index length + ...string(""), // compression + ...uint64LE(BigInt(chunkContents.length)), // compressed size + ...uint64LE(BigInt(chunkContents.length)), // uncompressed size + ]), + ), + ...record(Opcode.FOOTER, [ + ...uint64LE(BigInt(indexOffset)), // index offset + ...uint32LE(crc32(new Uint8Array(0))), // index crc + ]), + ...MCAP0_MAGIC, + ); + const readable = makeReadable(new Uint8Array(data)); + const reader = await Mcap0IndexedReader.Initialize({ readable }); + await expect(collect(reader.readMessages({ startTime, endTime }))).resolves.toEqual( + expected, + ); + expect(readable.readCalls).toBe(4); + }, + ); + }); + + it("does not yet support overlapping chunks", async () => { + const data = [ + ...MCAP0_MAGIC, + ...record(Opcode.HEADER, [ + ...string(""), // profile + ...string(""), // library + ...keyValues(string, string, []), // metadata + ]), + ]; + const indexOffset = BigInt(data.length); + data.push( + ...record( + Opcode.CHUNK_INDEX, + crcSuffix([ + ...uint64LE(0n), // start time + ...uint64LE(1n), // end time + ...uint64LE(0n), // offset + ...keyValues(uint16LE, uint64LE, []), // message index offsets + ...uint64LE(0n), // message index length + ...string(""), // compression + ...uint64LE(BigInt(0n)), // compressed size + ...uint64LE(BigInt(0n)), // uncompressed size + ]), + ), + ...record( + Opcode.CHUNK_INDEX, + crcSuffix([ + ...uint64LE(1n), // start time + ...uint64LE(2n), // end time + ...uint64LE(0n), // offset + ...keyValues(uint16LE, uint64LE, []), // message index offsets + ...uint64LE(0n), // message index length + ...string(""), // compression + 
...uint64LE(BigInt(0n)), // compressed size + ...uint64LE(BigInt(0n)), // uncompressed size + ]), + ), + ...record(Opcode.FOOTER, [ + ...uint64LE(BigInt(indexOffset)), // index offset + ...uint32LE(crc32(new Uint8Array(0))), // index crc + ]), + ...MCAP0_MAGIC, + ); + const reader = await Mcap0IndexedReader.Initialize({ + readable: makeReadable(new Uint8Array(data)), + }); + await expect(collect(reader.readMessages())).rejects.toThrow( + "Overlapping chunks are not currently supported", + ); + }); +}); diff --git a/typescript/src/v0/Mcap0IndexedReader.ts b/typescript/src/v0/Mcap0IndexedReader.ts new file mode 100644 index 0000000000..098d56cc66 --- /dev/null +++ b/typescript/src/v0/Mcap0IndexedReader.ts @@ -0,0 +1,389 @@ +import { crc32, crc32Final, crc32Init, crc32Update } from "@foxglove/crc"; +import Heap from "heap-js"; + +import { getBigUint64 } from "../common/getBigUint64"; +import { IReadable } from "./IReadable"; +import { MCAP0_MAGIC, Opcode } from "./constants"; +import { parseMagic, parseRecord } from "./parse"; +import { DecompressHandlers, TypedMcapRecords } from "./types"; + +export default class Mcap0IndexedReader { + readonly chunkIndexes: readonly TypedMcapRecords["ChunkIndex"][]; + readonly channelInfosById: ReadonlyMap; + + private readable: IReadable; + private decompressHandlers?: DecompressHandlers; + private readwriteChannelInfosById: Map; + + private startTime: bigint | undefined; + private endTime: bigint | undefined; + + private constructor({ + readable, + chunkIndexes, + decompressHandlers, + channelInfosById, + }: { + readable: IReadable; + chunkIndexes: readonly TypedMcapRecords["ChunkIndex"][]; + decompressHandlers?: DecompressHandlers; + channelInfosById: Map; + }) { + this.readable = readable; + this.chunkIndexes = chunkIndexes; + this.decompressHandlers = decompressHandlers; + this.channelInfosById = channelInfosById; + this.readwriteChannelInfosById = channelInfosById; + + for (const chunk of chunkIndexes) { + if (this.startTime == undefined || chunk.startTime < this.startTime) { + this.startTime = chunk.startTime; + } + if (this.endTime == undefined || chunk.endTime > this.endTime) { + this.endTime = chunk.endTime; + } + } + } + + static async Initialize({ + readable, + decompressHandlers, + }: { + readable: IReadable; + + /** + * When a compressed chunk is encountered, the entry in `decompressHandlers` corresponding to the + * compression will be called to decompress the chunk data. 
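+     *
+     * For example (editorial sketch; `decompressLz4` stands in for whatever LZ4 routine the
+     * application provides, and is not part of this patch):
+     * ```
+     * const reader = await Mcap0IndexedReader.Initialize({
+     *   readable,
+     *   decompressHandlers: {
+     *     lz4: (buffer, uncompressedSize) => decompressLz4(buffer, Number(uncompressedSize)),
+     *   },
+     * });
+     * ```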
+ */ + decompressHandlers?: DecompressHandlers; + }): Promise { + const size = await readable.size(); + let footerOffset: bigint; + let footerView: DataView; + { + const headerLengthLowerBound = BigInt( + MCAP0_MAGIC.length + + /* Opcode.HEADER */ 1 + + /* record length */ 8 + + /* profile length */ 4 + + /* library length */ 4, + ); + const footerReadLength = BigInt( + /* Opcode.FOOTER */ 1 + + /* record length */ 8 + + /* indexOffset */ 8 + + /* indexCrc */ 4 + + MCAP0_MAGIC.length, + ); + if (size < headerLengthLowerBound + footerReadLength) { + throw new Error(`File size (${size}) is too small to be valid MCAP`); + } + footerOffset = size - footerReadLength; + const footerBuffer = await readable.read(footerOffset, footerReadLength); + footerView = new DataView( + footerBuffer.buffer, + footerBuffer.byteOffset, + footerBuffer.byteLength, + ); + } + + void parseMagic(footerView, footerView.byteLength - MCAP0_MAGIC.length); + + const channelInfosById = new Map(); + + const footer = parseRecord({ + view: footerView, + startOffset: 0, + channelInfosById: new Map(), + validateCrcs: true, + }).record; + if (footer?.type !== "Footer") { + throw new Error( + `Unable to read footer from end of file (offset ${footerOffset}); found ${ + footer?.type ?? "nothing" + }`, + ); + } + if (footer.indexOffset === 0n) { + throw new Error("File is not indexed"); + } + + // Future optimization: avoid holding whole index blob in memory at once + const indexData = await readable.read(footer.indexOffset, footerOffset - footer.indexOffset); + if (footer.indexCrc !== 0) { + let indexCrc = crc32Init(); + indexCrc = crc32Update(indexCrc, indexData); + indexCrc = crc32Update( + indexCrc, + new DataView( + footerView.buffer, + footerView.byteOffset, + /* Opcode.FOOTER */ 1 + /* record length */ 8 + /* indexOffset */ 8, + ), + ); + indexCrc = crc32Final(indexCrc); + if (indexCrc !== footer.indexCrc) { + throw new Error(`Incorrect index CRC ${indexCrc} (expected ${footer.indexCrc})`); + } + } + + const indexView = new DataView(indexData.buffer, indexData.byteOffset, indexData.byteLength); + + const chunkIndexes: TypedMcapRecords["ChunkIndex"][] = []; + const attachmentIndexes: TypedMcapRecords["AttachmentIndex"][] = []; + let statistics: TypedMcapRecords["Statistics"] | undefined; + + let offset = 0; + for ( + let result; + (result = parseRecord({ + view: indexView, + startOffset: offset, + channelInfosById, + validateCrcs: true, + })), + result.record; + offset += result.usedBytes + ) { + switch (result.record.type) { + case "ChannelInfo": + // detection of duplicates is done in parseRecord + break; + case "ChunkIndex": + chunkIndexes.push(result.record); + break; + case "AttachmentIndex": + attachmentIndexes.push(result.record); + break; + case "Statistics": + if (statistics) { + throw new Error("Duplicate Statistics record"); + } + statistics = result.record; + break; + case "Unknown": + break; + default: + throw new Error(`${result.record.type} record not allowed in index section`); + } + } + if (offset !== indexView.byteLength) { + throw new Error(`${indexView.byteLength - offset} bytes remaining in index section`); + } + + return new Mcap0IndexedReader({ + readable, + chunkIndexes, + decompressHandlers, + channelInfosById, + }); + } + + async *readMessages({ + topics, + startTime = this.startTime, + endTime = this.endTime, + }: { + topics?: readonly string[]; + startTime?: bigint; + endTime?: bigint; + } = {}): AsyncGenerator { + if (startTime == undefined || endTime == undefined) { + return; + } + + let 
relevantChannels: Set | undefined; + if (topics) { + relevantChannels = new Set(); + for (const channelInfo of this.channelInfosById.values()) { + if (topics.includes(channelInfo.topicName)) { + relevantChannels.add(channelInfo.channelId); + } + } + } + + const relevantChunks = this.chunkIndexes.filter( + (chunk) => chunk.startTime <= endTime && chunk.endTime >= startTime, + ); + + for (let i = 0; i + 1 < relevantChunks.length; i++) { + if (relevantChunks[i]!.endTime >= relevantChunks[i + 1]!.startTime) { + throw new Error("Overlapping chunks are not currently supported"); + } + } + for (const chunkIndex of relevantChunks) { + yield* this.readChunk({ chunkIndex, channelIds: relevantChannels, startTime, endTime }); + } + } + + private async *readChunk({ + chunkIndex, + channelIds, + startTime, + endTime, + }: { + chunkIndex: TypedMcapRecords["ChunkIndex"]; + channelIds: ReadonlySet | undefined; + startTime: bigint; + endTime: bigint; + }): AsyncGenerator { + const chunkOpcodeAndLength = await this.readable.read(chunkIndex.chunkOffset, 1n + 8n); + const chunkOpcodeAndLengthView = new DataView( + chunkOpcodeAndLength.buffer, + chunkOpcodeAndLength.byteOffset, + chunkOpcodeAndLength.byteLength, + ); + if (chunkOpcodeAndLengthView.getUint8(0) !== Opcode.CHUNK) { + throw new Error( + `Chunk index offset does not point to chunk record (expected opcode ${ + Opcode.CHUNK + }, found ${chunkOpcodeAndLengthView.getUint8(0)})`, + ); + } + const chunkRecordLength = getBigUint64.call(chunkOpcodeAndLengthView, 1, true); + + // Future optimization: read only message indexes for given channelIds, not all message indexes for the chunk + const chunkAndMessageIndexes = await this.readable.read( + chunkIndex.chunkOffset, + 1n + 8n + chunkRecordLength + chunkIndex.messageIndexLength, + ); + const chunkAndMessageIndexesView = new DataView( + chunkAndMessageIndexes.buffer, + chunkAndMessageIndexes.byteOffset, + chunkAndMessageIndexes.byteLength, + ); + + let chunk: TypedMcapRecords["Chunk"]; + const messageIndexCursors = new Heap<{ + index: number; + channelId: number; + records: TypedMcapRecords["MessageIndex"]["records"]; + }>((a, b) => { + const recordTimeA = a.records[a.index]?.[0]; + const recordTimeB = b.records[b.index]?.[0]; + if (recordTimeA == undefined) { + return 1; + } else if (recordTimeB == undefined) { + return -1; + } + return Number(recordTimeA - recordTimeB); + }); + + { + let offset = 0; + const chunkResult = parseRecord({ + view: chunkAndMessageIndexesView, + startOffset: offset, + channelInfosById: this.readwriteChannelInfosById, + validateCrcs: true, + }); + offset += chunkResult.usedBytes; + if (chunkResult.record?.type !== "Chunk") { + throw new Error( + `Chunk index offset does not point to chunk record (found ${String( + chunkResult.record?.type, + )})`, + ); + } + chunk = chunkResult.record; + + for ( + let result; + (result = parseRecord({ + view: chunkAndMessageIndexesView, + startOffset: offset, + channelInfosById: this.readwriteChannelInfosById, + validateCrcs: true, + })), + result.record; + offset += result.usedBytes + ) { + if (result.record.type !== "MessageIndex") { + throw new Error(`Unexpected record type ${result.record.type} in message index section`); + } + if ( + result.record.records.length > 0 && + (channelIds == undefined || channelIds.has(result.record.channelId)) + ) { + for (let i = 0; i + 1 < result.record.records.length; i++) { + if (result.record.records[i]![0] >= result.record.records[i + 1]![0]) { + throw new Error( + `Message index entries for channel 
${result.record.channelId} in chunk at offset ${chunkIndex.chunkOffset} must be sorted by recordTime`, + ); + } + } + messageIndexCursors.push({ + index: 0, + channelId: result.record.channelId, + records: result.record.records, + }); + } + } + if (offset !== chunkAndMessageIndexesView.byteLength) { + throw new Error( + `${ + chunkAndMessageIndexesView.byteLength - offset + } bytes remaining in message index section`, + ); + } + } + + let buffer = chunk.records; + if (chunk.compression !== "" && buffer.byteLength > 0) { + const decompress = this.decompressHandlers?.[chunk.compression]; + if (!decompress) { + throw new Error(`Unsupported compression ${chunk.compression}`); + } + buffer = decompress(buffer, chunk.uncompressedSize); + } + if (chunk.uncompressedCrc !== 0) { + const chunkCrc = crc32(buffer); + if (chunkCrc !== chunk.uncompressedCrc) { + throw new Error(`Incorrect chunk CRC ${chunkCrc} (expected ${chunk.uncompressedCrc})`); + } + } + + const recordsView = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength); + + let cursor; + while ((cursor = messageIndexCursors.peek())) { + const [recordTime, offset] = cursor.records[cursor.index]!; + if (recordTime >= startTime && recordTime <= endTime) { + if (BigInt(recordsView.byteOffset) + offset >= Number.MAX_SAFE_INTEGER) { + throw new Error( + `Message offset too large (recordTime ${recordTime}, offset ${offset}) in channel ${cursor.channelId} in chunk at offset ${chunkIndex.chunkOffset}`, + ); + } + const result = parseRecord({ + view: recordsView, + startOffset: Number(offset), + channelInfosById: this.readwriteChannelInfosById, + validateCrcs: true, + }); + if (!result.record) { + throw new Error( + `Unable to parse record at offset ${offset} in chunk at offset ${chunkIndex.chunkOffset}`, + ); + } + if (result.record.type !== "Message") { + throw new Error( + `Unexpected record type ${result.record.type} in message index (time ${recordTime}, offset ${offset} in chunk at offset ${chunkIndex.chunkOffset})`, + ); + } + if (result.record.recordTime !== recordTime) { + throw new Error( + `Message recordTime ${result.record.recordTime} did not match message index entry (${recordTime} at offset ${offset} in chunk at offset ${chunkIndex.chunkOffset})`, + ); + } + yield result.record; + } + + if (cursor.index + 1 < cursor.records.length && recordTime <= endTime) { + cursor.index++; + messageIndexCursors.replace(cursor); + } else { + messageIndexCursors.pop(); + } + } + } +} diff --git a/typescript/src/v0/Mcap0RecordWriter.test.ts b/typescript/src/v0/Mcap0RecordWriter.test.ts new file mode 100644 index 0000000000..9c50af54d6 --- /dev/null +++ b/typescript/src/v0/Mcap0RecordWriter.test.ts @@ -0,0 +1,104 @@ +import { IWritable } from "."; +import { Mcap0RecordWriter } from "./Mcap0RecordWriter"; + +class MemoryWritable implements IWritable { + private fullBuffer: Uint8Array; + private offset = 0; + + get length() { + return this.offset; + } + + get buffer(): Readonly { + return this.fullBuffer.slice(0, this.offset); + } + + constructor() { + this.fullBuffer = new Uint8Array(4096); + } + + async write(buffer: Uint8Array): Promise { + this.fullBuffer.set(buffer, this.offset); + this.offset += buffer.length; + } +} + +describe("Mcap0RecordWriter", () => { + it("writes magic", async () => { + const memoryWritable = new MemoryWritable(); + const writer = new Mcap0RecordWriter(memoryWritable); + + await writer.writeMagic(); + expect(memoryWritable.buffer).toEqual(new Uint8Array([137, 77, 67, 65, 80, 48, 13, 10])); + }); + + it("writes 
header", async () => { + const memoryWritable = new MemoryWritable(); + const writer = new Mcap0RecordWriter(memoryWritable); + + await writer.writeHeader({ + profile: "foo", + library: "bar", + metadata: [["something", "magical"]], + }); + expect(memoryWritable.buffer).toEqual( + new Uint8Array([ + 1, 42, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 102, 111, 111, 3, 0, 0, 0, 98, 97, 114, 24, 0, 0, 0, + 9, 0, 0, 0, 115, 111, 109, 101, 116, 104, 105, 110, 103, 7, 0, 0, 0, 109, 97, 103, 105, 99, + 97, 108, + ]), + ); + }); + + it("writes footer", async () => { + const memoryWritable = new MemoryWritable(); + const writer = new Mcap0RecordWriter(memoryWritable); + + await writer.writeFooter({ + indexOffset: 0n, + indexCrc: 0, + }); + expect(memoryWritable.buffer).toEqual( + new Uint8Array([2, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), + ); + }); + + it("writes channel info", async () => { + const memoryWritable = new MemoryWritable(); + const writer = new Mcap0RecordWriter(memoryWritable); + + await writer.writeChannelInfo({ + channelId: 1, + topicName: "topic", + encoding: "enc", + schemaName: "foo", + schema: "bar", + userData: [], + }); + expect(memoryWritable.buffer).toEqual( + new Uint8Array([ + 3, 40, 0, 0, 0, 0, 0, 0, 0, 1, 0, 5, 0, 0, 0, 116, 111, 112, 105, 99, 3, 0, 0, 0, 101, 110, + 99, 3, 0, 0, 0, 102, 111, 111, 3, 0, 0, 0, 98, 97, 114, 0, 0, 0, 0, 0, 0, 0, 0, + ]), + ); + }); + + it("writes messages", async () => { + const memoryWritable = new MemoryWritable(); + const writer = new Mcap0RecordWriter(memoryWritable); + + await writer.writeMessage({ + channelId: 1, + publishTime: 3n, + recordTime: 5n, + sequence: 7, + messageData: new Uint8Array(), + }); + expect(memoryWritable.buffer).toEqual( + new Uint8Array([ + 4, 22, 0, 0, 0, 0, 0, 0, 0, 1, 0, 7, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, + 0, + ]), + ); + }); +}); diff --git a/typescript/src/v0/Mcap0RecordWriter.ts b/typescript/src/v0/Mcap0RecordWriter.ts new file mode 100644 index 0000000000..49e2cdc46b --- /dev/null +++ b/typescript/src/v0/Mcap0RecordWriter.ts @@ -0,0 +1,116 @@ +import { BufferedWriter } from "../common/BufferedWriter"; +import { IWritable } from "../common/IWritable"; +import { MCAP0_MAGIC, Opcode } from "./constants"; +import { ChannelInfo, Header, Footer, Message, Attachment } from "./types"; + +/** + * Mcap0RecordWriter provides methods to serialize mcap records to an IWritable. + * + * It makes no effort to ensure spec compatability on the order of records, this is the responsibility + * of the caller. + * + * Unless you are building your own higher level writer interface, you'll likely want to use one of + * the higher level writer interfaces. 
+ */
+export class Mcap0RecordWriter {
+  private recordPrefixWriter: BufferedWriter;
+  private bufferedWriter: BufferedWriter;
+  private writable: IWritable;
+
+  constructor(writable: IWritable) {
+    this.recordPrefixWriter = new BufferedWriter();
+    this.bufferedWriter = new BufferedWriter();
+    this.writable = writable;
+  }
+
+  async writeMagic(): Promise<void> {
+    await this.writable.write(new Uint8Array(MCAP0_MAGIC));
+  }
+
+  async writeHeader(header: Header): Promise<void> {
+    this.bufferedWriter.string(header.profile);
+    this.bufferedWriter.string(header.library);
+
+    const keyValueWriter = new BufferedWriter();
+    for (const item of header.metadata) {
+      const [key, value] = item;
+      keyValueWriter.string(key);
+      keyValueWriter.string(value);
+    }
+
+    this.bufferedWriter.uint32(keyValueWriter.length);
+
+    this.recordPrefixWriter.uint8(Opcode.HEADER);
+    this.recordPrefixWriter.uint64(BigInt(this.bufferedWriter.length + keyValueWriter.length));
+
+    await this.recordPrefixWriter.flush(this.writable);
+    await this.bufferedWriter.flush(this.writable);
+    await keyValueWriter.flush(this.writable);
+  }
+
+  async writeFooter(footer: Footer): Promise<void> {
+    this.recordPrefixWriter.uint8(Opcode.FOOTER);
+    this.recordPrefixWriter.uint64(12n); // footer is fixed length
+    this.recordPrefixWriter.uint64(footer.indexOffset);
+    this.recordPrefixWriter.uint32(footer.indexCrc);
+
+    await this.recordPrefixWriter.flush(this.writable);
+  }
+
+  async writeChannelInfo(info: ChannelInfo): Promise<void> {
+    this.bufferedWriter.uint16(info.channelId);
+    this.bufferedWriter.string(info.topicName);
+    this.bufferedWriter.string(info.encoding);
+    this.bufferedWriter.string(info.schemaName);
+    this.bufferedWriter.string(info.schema);
+
+    const keyValueWriter = new BufferedWriter();
+    for (const item of info.userData) {
+      const [key, value] = item;
+      keyValueWriter.string(key);
+      keyValueWriter.string(value);
+    }
+
+    this.bufferedWriter.uint32(keyValueWriter.length);
+
+    // Add the CRC to keyValueWriter after adding the length of the key/values to bufferedWriter.
+    // This allows the CRC to serialize out with the keyValueWriter.
+    keyValueWriter.uint32(0);
+
+    this.recordPrefixWriter.uint8(Opcode.CHANNEL_INFO);
+    this.recordPrefixWriter.uint64(BigInt(this.bufferedWriter.length + keyValueWriter.length));
+
+    await this.recordPrefixWriter.flush(this.writable);
+    await this.bufferedWriter.flush(this.writable);
+    await keyValueWriter.flush(this.writable);
+  }
+
+  async writeMessage(message: Message): Promise<void> {
+    this.bufferedWriter.uint16(message.channelId);
+    this.bufferedWriter.uint32(message.sequence);
+    this.bufferedWriter.uint64(message.publishTime);
+    this.bufferedWriter.uint64(message.recordTime);
+
+    this.recordPrefixWriter.uint8(Opcode.MESSAGE);
+    this.recordPrefixWriter.uint64(
+      BigInt(this.bufferedWriter.length + message.messageData.byteLength),
+    );
+
+    await this.recordPrefixWriter.flush(this.writable);
+    await this.bufferedWriter.flush(this.writable);
+    await this.writable.write(message.messageData);
+  }
+
+  async writeAttachment(attachment: Attachment): Promise<void> {
+    this.bufferedWriter.string(attachment.name);
+    this.bufferedWriter.uint64(attachment.recordTime);
+    this.bufferedWriter.string(attachment.contentType);
+    // the parser reads an explicit data length before the attachment data
+    this.bufferedWriter.uint64(BigInt(attachment.data.byteLength));
+
+    this.recordPrefixWriter.uint8(Opcode.ATTACHMENT);
+    this.recordPrefixWriter.uint64(BigInt(this.bufferedWriter.length + attachment.data.byteLength));
+
+    await this.recordPrefixWriter.flush(this.writable);
+    await this.bufferedWriter.flush(this.writable);
+    await this.writable.write(attachment.data);
+  }
+}
diff --git 
a/typescript/src/v0/Mcap0StreamReader.test.ts b/typescript/src/v0/Mcap0StreamReader.test.ts new file mode 100644 index 0000000000..bf91a62de9 --- /dev/null +++ b/typescript/src/v0/Mcap0StreamReader.test.ts @@ -0,0 +1,458 @@ +import { crc32 } from "@foxglove/crc"; + +import { TypedMcapRecords } from "."; +import Mcap0StreamReader from "./Mcap0StreamReader"; +import { MCAP0_MAGIC, Opcode } from "./constants"; +import { record, uint64LE, uint32LE, string, uint16LE, crcSuffix, keyValues } from "./testUtils"; + +describe("Mcap0StreamReader", () => { + it("rejects invalid header", () => { + for (let i = 0; i < MCAP0_MAGIC.length - 1; i++) { + const reader = new Mcap0StreamReader(); + const badMagic = MCAP0_MAGIC.slice(); + badMagic[i] = 0x00; + reader.append(new Uint8Array([...badMagic])); + expect(() => reader.nextRecord()).toThrow("Expected MCAP magic"); + } + }); + + it("rejects invalid footer magic", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + ...record(Opcode.FOOTER, [ + ...uint64LE(0x0123456789abcdefn), // index offset + ...uint32LE(0x01234567), // index crc + ]), + ...MCAP0_MAGIC.slice(0, MCAP0_MAGIC.length - 1), + 0x00, + ]), + ); + expect(() => reader.nextRecord()).toThrow("Expected MCAP magic"); + }); + + it("parses empty file", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + ...record(Opcode.FOOTER, [ + ...uint64LE(0x0123456789abcdefn), // index offset + ...uint32LE(0x01234567), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(reader.nextRecord()).toEqual({ + type: "Footer", + indexOffset: 0x0123456789abcdefn, + indexCrc: 0x01234567, + }); + expect(reader.done()).toBe(true); + }); + + it("accepts empty chunks", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(0), // decompressed crc32 + ...string("lz4"), // compression + // no chunk data + ]), + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(reader.nextRecord()).toEqual({ + type: "Footer", + indexOffset: 0n, + indexCrc: 0, + }); + expect(reader.done()).toBe(true); + }); + + it("waits patiently to parse one byte at a time, and rejects new data after read completed", () => { + const reader = new Mcap0StreamReader(); + const data = new Uint8Array([ + ...MCAP0_MAGIC, + ...record(Opcode.FOOTER, [ + ...uint64LE(0x0123456789abcdefn), // index offset + ...uint32LE(0x01234567), // index crc + ]), + ...MCAP0_MAGIC, + ]); + for (let i = 0; i < data.length - 1; i++) { + reader.append(new Uint8Array(data.buffer, i, 1)); + expect(reader.nextRecord()).toBeUndefined(); + expect(reader.done()).toBe(false); + } + reader.append(new Uint8Array(data.buffer, data.length - 1, 1)); + expect(reader.nextRecord()).toEqual({ + type: "Footer", + indexOffset: 0x0123456789abcdefn, + indexCrc: 0x01234567, + }); + expect(reader.done()).toBe(true); + expect(() => reader.append(new Uint8Array([42]))).toThrow("Already done reading"); + }); + + it("rejects extraneous data at end of file", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + ...record(Opcode.FOOTER, [ + ...uint64LE(0x0123456789abcdefn), // index offset + ...uint32LE(0x01234567), // index crc + ]), + ...MCAP0_MAGIC, + 42, + ]), + ); + expect(() => reader.nextRecord()).toThrow("bytes remaining after MCAP 
footer"); + }); + + it("parses file with empty chunk", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(0), // decompressed crc32 + ...string(""), // compression + // (no chunk data) + ]), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(reader.nextRecord()).toEqual({ + type: "Footer", + indexOffset: 0n, + indexCrc: 0, + }); + expect(reader.done()).toBe(true); + }); + + it("rejects chunk with incomplete record", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + ...record(Opcode.CHUNK, [ + ...uint64LE(1n), // decompressed size + ...uint32LE(crc32(new Uint8Array([Opcode.CHANNEL_INFO]))), // decompressed crc32 + ...string(""), // compression + + Opcode.CHANNEL_INFO, // truncated record + ]), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(() => reader.nextRecord()).toThrow("bytes remaining in chunk"); + }); + + it("rejects message at top level with no prior channel info", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + ...record(Opcode.MESSAGE, [ + ...uint16LE(42), // channel id + ...uint64LE(0n), // timestamp + ]), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(() => reader.nextRecord()).toThrow( + "Encountered message on channel 42 without prior channel info", + ); + }); + + it("rejects message in chunk with no prior channel info", () => { + const message = record(Opcode.MESSAGE, [ + ...uint16LE(42), // channel id + ...uint64LE(0n), // timestamp + ]); + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(message)), // decompressed crc32 + ...string(""), // compression + ...message, + ]), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(() => reader.nextRecord()).toThrow( + "Encountered message on channel 42 without prior channel info", + ); + }); + + it("parses channel info at top level", () => { + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + ...record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(1), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(reader.nextRecord()).toEqual({ + type: "ChannelInfo", + channelId: 1, + topicName: "mytopic", + encoding: "utf12", + schemaName: "some data", + schema: "stuff", + userData: [["foo", "bar"]], + } as TypedMcapRecords["ChannelInfo"]); + expect(reader.nextRecord()).toEqual({ + type: "Footer", + indexOffset: 0n, + indexCrc: 0, + }); + expect(reader.done()).toBe(true); + }); + + it.each([true, false])("parses channel info in chunk (compressed: %s)", (compressed) => { + const channelInfo = record( + Opcode.CHANNEL_INFO, + crcSuffix([ + 
...uint16LE(1), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ); + const decompressHandlers = { xyz: () => channelInfo }; + const reader = new Mcap0StreamReader(compressed ? { decompressHandlers } : undefined); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(channelInfo)), // decompressed crc32 + ...string(compressed ? "xyz" : ""), // compression + ...(compressed ? new TextEncoder().encode("compressed bytes") : channelInfo), + ]), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(reader.nextRecord()).toEqual({ + type: "ChannelInfo", + channelId: 1, + topicName: "mytopic", + encoding: "utf12", + schemaName: "some data", + schema: "stuff", + userData: [["foo", "bar"]], + } as TypedMcapRecords["ChannelInfo"]); + expect(reader.nextRecord()).toEqual({ + type: "Footer", + indexOffset: 0n, + indexCrc: 0, + }); + expect(reader.done()).toBe(true); + }); + + describe.each(["unchunked file", "same chunk", "different chunks"] as const)( + "rejects channel info with the same id in %s", + (testType) => { + it.each([ + { + key: "topic", + channelInfo2: record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("XXXXXXXX"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ), + }, + { + key: "encoding", + channelInfo2: record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("XXXXXXXX"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ), + }, + { + key: "schema name", + channelInfo2: record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("XXXXXXXX"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ), + }, + { + key: "schema", + channelInfo2: record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("XXXXXXXX"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ), + }, + { + key: "data", + channelInfo2: record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [ + ["foo", "bar"], + ["baz", "quux"], + ]), // user data + ]), + ), + }, + ])("differing in $key", ({ channelInfo2 }) => { + const channelInfo = record( + Opcode.CHANNEL_INFO, + crcSuffix([ + ...uint16LE(42), // channel id + ...string("mytopic"), // topic + ...string("utf12"), // encoding + ...string("some data"), // schema name + ...string("stuff"), // schema + ...keyValues(string, string, [["foo", "bar"]]), // user data + ]), + ); + const reader = new Mcap0StreamReader(); + reader.append( + new Uint8Array([ + ...MCAP0_MAGIC, + + 
...(testType === "unchunked file" + ? [...channelInfo, ...channelInfo2] + : testType === "same chunk" + ? record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(new Uint8Array([...channelInfo, ...channelInfo2]))), // decompressed crc32 + ...string(""), // compression + ...channelInfo, + ...channelInfo2, + ]) + : testType === "different chunks" + ? [ + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(new Uint8Array(channelInfo))), // decompressed crc32 + ...string(""), // compression + ...channelInfo, + ]), + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(new Uint8Array(channelInfo2))), // decompressed crc32 + ...string(""), // compression + ...channelInfo2, + ]), + ] + : []), + + ...record(Opcode.FOOTER, [ + ...uint64LE(0n), // index offset + ...uint32LE(0), // index crc + ]), + ...MCAP0_MAGIC, + ]), + ); + expect(reader.nextRecord()).toEqual({ + type: "ChannelInfo", + channelId: 42, + topicName: "mytopic", + encoding: "utf12", + schemaName: "some data", + schema: "stuff", + userData: [["foo", "bar"]], + } as TypedMcapRecords["ChannelInfo"]); + expect(() => reader.nextRecord()).toThrow("differing channel infos for 42"); + }); + }, + ); +}); diff --git a/typescript/src/v0/Mcap0StreamReader.ts b/typescript/src/v0/Mcap0StreamReader.ts new file mode 100644 index 0000000000..c1de4802c7 --- /dev/null +++ b/typescript/src/v0/Mcap0StreamReader.ts @@ -0,0 +1,217 @@ +import { crc32 } from "@foxglove/crc"; + +import { DecompressHandlers, TypedMcapRecords } from "."; +import StreamBuffer from "../common/StreamBuffer"; +import { MCAP0_MAGIC } from "./constants"; +import { parseMagic, parseRecord } from "./parse"; +import { McapStreamReader, TypedMcapRecord } from "./types"; + +type McapReaderOptions = { + /** + * When set to true, Chunk records will be returned from `nextRecord()`. Chunk contents will still + * be processed after each chunk record itself. + */ + includeChunks?: boolean; + + /** + * When a compressed chunk is encountered, the entry in `decompressHandlers` corresponding to the + * compression will be called to decompress the chunk data. + */ + decompressHandlers?: DecompressHandlers; + + /** + * When set to true (the default), chunk CRCs will be validated. Set to false to improve performance. + */ + validateCrcs?: boolean; +}; + +/** + * A streaming reader for MCAP files. + * + * Usage example: + * ``` + * const reader = new Mcap0StreamReader(); + * stream.on("data", (data) => { + * try { + * reader.append(data); + * for (let record; (record = reader.nextRecord()); ) { + * // process available records + * } + * } catch (e) { + * // handle errors + * } + * }); + * ``` + */ +export default class Mcap0StreamReader implements McapStreamReader { + private buffer = new StreamBuffer(MCAP0_MAGIC.length * 2); + private decompressHandlers; + private includeChunks; + private validateCrcs; + private doneReading = false; + private generator = this.read(); + + constructor({ + includeChunks = false, + decompressHandlers = {}, + validateCrcs = true, + }: McapReaderOptions = {}) { + this.includeChunks = includeChunks; + this.decompressHandlers = decompressHandlers; + this.validateCrcs = validateCrcs; + } + + /** @returns True if a valid, complete mcap file has been parsed. */ + done(): boolean { + return this.doneReading; + } + + /** @returns The number of bytes that have been received by `append()` but not yet parsed. 
*/ + bytesRemaining(): number { + return this.buffer.bytesRemaining(); + } + + /** + * Provide the reader with newly received bytes for it to process. After calling this function, + * call `nextRecord()` again to parse any records that are now available. + */ + append(data: Uint8Array): void { + if (this.doneReading) { + throw new Error("Already done reading"); + } + this.buffer.append(data); + } + + /** + * Read the next record from the stream if possible. If not enough data is available to parse a + * complete record, or if the reading has terminated with a valid footer, returns undefined. + * + * This function may throw any errors encountered during parsing. If an error is thrown, the + * reader is in an unspecified state and should no longer be used. + */ + nextRecord(): TypedMcapRecord | undefined { + if (this.doneReading) { + return undefined; + } + const result = this.generator.next(); + if (result.done === true) { + this.doneReading = true; + } + return result.value; + } + + private *read(): Generator { + const channelInfosById = new Map(); + { + let magic, usedBytes; + while ((({ magic, usedBytes } = parseMagic(this.buffer.view, 0)), !magic)) { + yield; + } + this.buffer.consume(usedBytes); + } + + for (;;) { + let record; + { + let usedBytes; + while ( + (({ record, usedBytes } = parseRecord({ + view: this.buffer.view, + startOffset: 0, + channelInfosById, + validateCrcs: this.validateCrcs, + })), + !record) + ) { + yield; + } + this.buffer.consume(usedBytes); + } + switch (record.type) { + case "Unknown": + break; + case "Header": + case "ChannelInfo": + case "Message": + case "MessageIndex": + case "ChunkIndex": + case "Attachment": + case "AttachmentIndex": + case "Statistics": + yield record; + break; + + case "Chunk": { + if (this.includeChunks) { + yield record; + } + let buffer = record.records; + if (record.compression !== "" && buffer.byteLength > 0) { + const decompress = this.decompressHandlers[record.compression]; + if (!decompress) { + throw new Error(`Unsupported compression ${record.compression}`); + } + buffer = decompress(buffer, record.uncompressedSize); + } + if (this.validateCrcs && record.uncompressedCrc !== 0) { + const chunkCrc = crc32(buffer); + if (chunkCrc !== record.uncompressedCrc) { + throw new Error( + `Incorrect chunk CRC ${chunkCrc} (expected ${record.uncompressedCrc})`, + ); + } + } + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength); + let chunkOffset = 0; + for ( + let chunkResult; + (chunkResult = parseRecord({ + view, + startOffset: chunkOffset, + channelInfosById, + validateCrcs: this.validateCrcs, + })), + chunkResult.record; + chunkOffset += chunkResult.usedBytes + ) { + switch (chunkResult.record.type) { + case "Unknown": + break; + case "Header": + case "Footer": + case "Chunk": + case "MessageIndex": + case "ChunkIndex": + case "Attachment": + case "AttachmentIndex": + case "Statistics": + throw new Error(`${chunkResult.record.type} record not allowed inside a chunk`); + case "ChannelInfo": + case "Message": + yield chunkResult.record; + break; + } + } + if (chunkOffset !== buffer.byteLength) { + throw new Error(`${buffer.byteLength - chunkOffset} bytes remaining in chunk`); + } + break; + } + case "Footer": + { + let magic, usedBytes; + while ((({ magic, usedBytes } = parseMagic(this.buffer.view, 0)), !magic)) { + yield; + } + this.buffer.consume(usedBytes); + } + if (this.buffer.bytesRemaining() !== 0) { + throw new Error( + `${this.buffer.bytesRemaining()} bytes remaining after MCAP footer and trailing 
magic`,
+          );
+        }
+        return record;
+      }
+    }
+  }
+}
diff --git a/typescript/src/v0/Mcap0UnindexedWriter.ts b/typescript/src/v0/Mcap0UnindexedWriter.ts
new file mode 100644
index 0000000000..6fdd82fa4c
--- /dev/null
+++ b/typescript/src/v0/Mcap0UnindexedWriter.ts
@@ -0,0 +1,61 @@
+import { IWritable } from "../common/IWritable";
+import { Mcap0RecordWriter } from "./Mcap0RecordWriter";
+import { ChannelInfo, Message, Header, Attachment } from "./types";
+
+export class Mcap0UnindexedWriter {
+  private recordWriter: Mcap0RecordWriter;
+
+  private channelInfos = new Map<number, ChannelInfo>();
+  private writtenChannelIds = new Set<number>();
+
+  constructor(writable: IWritable) {
+    this.recordWriter = new Mcap0RecordWriter(writable);
+  }
+
+  async start(header: Header): Promise<void> {
+    await this.recordWriter.writeMagic();
+    await this.recordWriter.writeHeader(header);
+  }
+
+  async end(): Promise<void> {
+    await this.recordWriter.writeFooter({
+      indexOffset: 0n,
+      indexCrc: 0,
+    });
+    await this.recordWriter.writeMagic();
+  }
+
+  /**
+   * Add channel info and return a generated channel id. The channel id is used when adding messages.
+   */
+  async registerChannel(info: Omit<ChannelInfo, "channelId">): Promise<number> {
+    const channelId = this.channelInfos.size + 1;
+    this.channelInfos.set(channelId, {
+      ...info,
+      channelId,
+    });
+
+    return channelId;
+  }
+
+  async addMessage(message: Message): Promise<void> {
+    // write out channel id if we have not yet done so
+    if (!this.writtenChannelIds.has(message.channelId)) {
+      const channelInfo = this.channelInfos.get(message.channelId);
+      if (!channelInfo) {
+        throw new Error(
+          `Mcap0UnindexedWriter#addMessage failed: missing channel info for id ${message.channelId}`,
+        );
+      }
+
+      await this.recordWriter.writeChannelInfo(channelInfo);
+      this.writtenChannelIds.add(message.channelId);
+    }
+
+    await this.recordWriter.writeMessage(message);
+  }
+
+  async addAttachment(attachment: Attachment): Promise<void> {
+    await this.recordWriter.writeAttachment(attachment);
+  }
+}
diff --git a/typescript/src/v0/Reader.ts b/typescript/src/v0/Reader.ts
new file mode 100644
index 0000000000..992c32d726
--- /dev/null
+++ b/typescript/src/v0/Reader.ts
@@ -0,0 +1,102 @@
+import { getBigUint64 } from "../common/getBigUint64";
+
+export default class Reader {
+  private view: DataView;
+  offset: number;
+  private textDecoder = new TextDecoder();
+
+  constructor(view: DataView, offset = 0) {
+    this.view = view;
+    this.offset = offset;
+  }
+
+  uint8(): number {
+    const value = this.view.getUint8(this.offset);
+    this.offset += 1;
+    return value;
+  }
+
+  uint16(): number {
+    const value = this.view.getUint16(this.offset, true);
+    this.offset += 2;
+    return value;
+  }
+
+  uint32(): number {
+    const value = this.view.getUint32(this.offset, true);
+    this.offset += 4;
+    return value;
+  }
+
+  uint64(): bigint {
+    const value = getBigUint64.call(this.view, this.offset, true);
+    this.offset += 8;
+    return value;
+  }
+
+  string(): string {
+    const length = this.uint32();
+    if (this.offset + length > this.view.byteLength) {
+      throw new Error(`String length ${length} exceeds bounds of buffer`);
+    }
+    const value = this.textDecoder.decode(
+      new Uint8Array(this.view.buffer, this.view.byteOffset + this.offset, length),
+    );
+    this.offset += length;
+    return value;
+  }
+
+  keyValuePairs<K, V>(readKey: (reader: Reader) => K, readValue: (reader: Reader) => V): [K, V][] {
+    const length = this.uint32();
+    if (this.offset + length > this.view.byteLength) {
+      throw new Error(`Key-value pairs length ${length} exceeds bounds of buffer`);
+    }
+    const result: [K, V][] = [];
+    const endOffset = this.offset + length;
+    try {
+      while (this.offset < endOffset) {
+        result.push([readKey(this), readValue(this)]);
+      }
+    } catch (err) {
+      throw new Error(`Error reading key-value pairs: ${(err as Error).message}`);
+    }
+    if (this.offset !== endOffset) {
+      throw new Error(
+        `Key-value pairs length (${
+          this.offset - endOffset + length
+        }) greater than expected (${length})`,
+      );
+    }
+    return result;
+  }
+
+  map<K, V>(readKey: (reader: Reader) => K, readValue: (reader: Reader) => V): Map<K, V> {
+    const length = this.uint32();
+    if (this.offset + length > this.view.byteLength) {
+      throw new Error(`Map length ${length} exceeds bounds of buffer`);
+    }
+    const result = new Map<K, V>();
+    const endOffset = this.offset + length;
+    try {
+      while (this.offset < endOffset) {
+        const key = readKey(this);
+        const value = readValue(this);
+        const existingValue = result.get(key);
+        if (existingValue != undefined) {
+          throw new Error(
+            `Duplicate key ${String(key)} (${String(existingValue)} vs ${String(value)})`,
+          );
+        }
+        result.set(key, value);
+      }
+    } catch (err) {
+      throw new Error(`Error reading map: ${(err as Error).message}`);
+    }
+    if (this.offset !== endOffset) {
+      throw new Error(
+        `Map length (${this.offset - endOffset + length}) greater than expected (${length})`,
+      );
+    }
+    return result;
+  }
+}
diff --git a/typescript/src/v0/constants.ts b/typescript/src/v0/constants.ts
new file mode 100644
index 0000000000..45a180b77c
--- /dev/null
+++ b/typescript/src/v0/constants.ts
@@ -0,0 +1,21 @@
+/** Array.from("\x89MCAP0\r\n", (c) => c.charCodeAt(0)) */
+export const MCAP0_MAGIC = Object.freeze([137, 77, 67, 65, 80, 48, 13, 10]);
+
+export enum Opcode {
+  MIN = 0x01,
+  HEADER = 0x01,
+  FOOTER = 0x02,
+  CHANNEL_INFO = 0x03,
+  MESSAGE = 0x04,
+  CHUNK = 0x05,
+  MESSAGE_INDEX = 0x06,
+  CHUNK_INDEX = 0x07,
+  ATTACHMENT = 0x08,
+  ATTACHMENT_INDEX = 0x09,
+  STATISTICS = 0x0a,
+  MAX = 0x0a,
+}
+
+export function isKnownOpcode(opcode: number): opcode is Opcode {
+  return opcode >= Opcode.MIN && opcode <= Opcode.MAX;
+}
diff --git a/typescript/src/v0/index.ts b/typescript/src/v0/index.ts
new file mode 100644
index 0000000000..c5305a0e24
--- /dev/null
+++ b/typescript/src/v0/index.ts
@@ -0,0 +1,4 @@
+export * from "../common/IWritable";
+export * from "./types";
+export * from "./Mcap0RecordWriter";
+export * from "./Mcap0UnindexedWriter";
diff --git a/typescript/src/v0/parse.ts b/typescript/src/v0/parse.ts
new file mode 100644
index 0000000000..6ab33c015a
--- /dev/null
+++ b/typescript/src/v0/parse.ts
@@ -0,0 +1,332 @@
+import { crc32 } from "@foxglove/crc";
+import { isEqual } from "lodash";
+
+import { TypedMcapRecords } from ".";
+import Reader from "./Reader";
+import { isKnownOpcode, MCAP0_MAGIC, Opcode } from "./constants";
+import { McapMagic, TypedMcapRecord } from "./types";
+
+/**
+ * Parse a MCAP magic string at `startOffset` in `view`.
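+ *
+ * The magic is the 8-byte sequence 0x89 "MCAP0" 0x0d 0x0a. On success this returns
+ * `{ magic, usedBytes: 8 }`; when fewer than 8 bytes are available it returns
+ * `{ usedBytes: 0 }` so the caller can wait for more data before retrying.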
+ */ +export function parseMagic( + view: DataView, + startOffset: number, +): { magic: McapMagic; usedBytes: number } | { magic?: undefined; usedBytes: 0 } { + if (startOffset + MCAP0_MAGIC.length > view.byteLength) { + return { usedBytes: 0 }; + } + if (!MCAP0_MAGIC.every((val, i) => val === view.getUint8(startOffset + i))) { + throw new Error( + `Expected MCAP magic '${MCAP0_MAGIC.map((val) => val.toString(16).padStart(2, "0")).join( + " ", + )}', found '${Array.from(MCAP0_MAGIC, (_, i) => + view.getUint8(i).toString(16).padStart(2, "0"), + ).join(" ")}'`, + ); + } + return { + magic: { specVersion: "0" }, + usedBytes: MCAP0_MAGIC.length, + }; +} + +/** + * Parse a MCAP record beginning at `startOffset` in `view`. + * + * @param channelInfosById Used to track ChannelInfo objects across calls to `parseRecord` and + * associate them with newly parsed Message records. + */ +export function parseRecord({ + view, + startOffset, + channelInfosById, + validateCrcs, +}: { + view: DataView; + startOffset: number; + channelInfosById: Map; + validateCrcs: boolean; +}): { record: TypedMcapRecord; usedBytes: number } | { record?: undefined; usedBytes: 0 } { + if (startOffset + /*opcode*/ 1 + /*record length*/ 8 >= view.byteLength) { + return { usedBytes: 0 }; + } + const headerReader = new Reader(view, startOffset); + + const opcode = headerReader.uint8(); + + const recordLength = headerReader.uint64(); + if (recordLength > Number.MAX_SAFE_INTEGER) { + throw new Error(`Record length ${recordLength} is too large`); + } + const recordEndOffset = headerReader.offset + Number(recordLength); + if (recordEndOffset > view.byteLength) { + return { usedBytes: 0 }; + } + + if (!isKnownOpcode(opcode)) { + const record: TypedMcapRecord = { + type: "Unknown", + opcode, + data: new Uint8Array( + view.buffer, + view.byteOffset + headerReader.offset, + Number(recordLength), + ), + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + + const recordView = new DataView( + view.buffer, + view.byteOffset + headerReader.offset, + Number(recordLength), + ); + const reader = new Reader(recordView); + + switch (opcode) { + case Opcode.HEADER: { + const profile = reader.string(); + const library = reader.string(); + const metadata = reader.keyValuePairs( + (r) => r.string(), + (r) => r.string(), + ); + const record: TypedMcapRecord = { type: "Header", profile, library, metadata }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + + case Opcode.FOOTER: { + const indexOffset = reader.uint64(); + const indexCrc = reader.uint32(); + const record: TypedMcapRecord = { type: "Footer", indexOffset, indexCrc }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + + case Opcode.CHANNEL_INFO: { + const channelId = reader.uint16(); + const topicName = reader.string(); + const encoding = reader.string(); + const schemaName = reader.string(); + const schema = reader.string(); + const userData = reader.keyValuePairs( + (r) => r.string(), + (r) => r.string(), + ); + const crcLength = reader.offset; + const expectedCrc = reader.uint32(); + if (validateCrcs && expectedCrc !== 0) { + const actualCrc = crc32(new DataView(recordView.buffer, recordView.byteOffset, crcLength)); + if (actualCrc !== expectedCrc) { + throw new Error( + `Channel Info CRC32 mismatch: expected ${expectedCrc}, actual ${actualCrc}`, + ); + } + } + + const record: TypedMcapRecord = { + type: "ChannelInfo", + channelId, + topicName, + encoding, + schemaName, + schema, + userData, + }; + const existingInfo = 
channelInfosById.get(channelId); + if (existingInfo) { + if (!isEqual(existingInfo, record)) { + throw new Error(`differing channel infos for ${record.channelId}`); + } + return { + record: existingInfo, + usedBytes: recordEndOffset - startOffset, + }; + } else { + channelInfosById.set(channelId, record); + return { record, usedBytes: recordEndOffset - startOffset }; + } + } + + case Opcode.MESSAGE: { + const channelId = reader.uint16(); + const channelInfo = channelInfosById.get(channelId); + if (!channelInfo) { + throw new Error(`Encountered message on channel ${channelId} without prior channel info`); + } + const sequence = reader.uint32(); + const publishTime = reader.uint64(); + const recordTime = reader.uint64(); + const messageData = new Uint8Array( + recordView.buffer.slice( + recordView.byteOffset + reader.offset, + recordView.byteOffset + recordView.byteLength, + ), + ); + const record: TypedMcapRecord = { + type: "Message", + channelId, + sequence, + publishTime, + recordTime, + messageData, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + + case Opcode.CHUNK: { + const uncompressedSize = reader.uint64(); + const uncompressedCrc = reader.uint32(); + const compression = reader.string(); + const records = new Uint8Array( + recordView.buffer.slice( + recordView.byteOffset + reader.offset, + recordView.byteOffset + recordView.byteLength, + ), + ); + const record: TypedMcapRecord = { + type: "Chunk", + compression, + uncompressedSize, + uncompressedCrc, + records, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + + case Opcode.MESSAGE_INDEX: { + const channelId = reader.uint16(); + const count = reader.uint32(); + const records = reader.keyValuePairs( + (r) => r.uint64(), + (r) => r.uint64(), + ); + const crcLength = reader.offset; + const expectedCrc = reader.uint32(); + if (validateCrcs && expectedCrc !== 0) { + const actualCrc = crc32(new DataView(recordView.buffer, recordView.byteOffset, crcLength)); + if (actualCrc !== expectedCrc) { + throw new Error( + `Message Index CRC32 mismatch: expected ${expectedCrc}, actual ${actualCrc}`, + ); + } + } + const record: TypedMcapRecord = { + type: "MessageIndex", + channelId, + count, + records, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + case Opcode.CHUNK_INDEX: { + const startTime = reader.uint64(); + const endTime = reader.uint64(); + const chunkOffset = reader.uint64(); + const messageIndexOffsets = reader.map( + (r) => r.uint16(), + (r) => r.uint64(), + ); + const messageIndexLength = reader.uint64(); + const compression = reader.string(); + const compressedSize = reader.uint64(); + const uncompressedSize = reader.uint64(); + const crcLength = reader.offset; + const expectedCrc = reader.uint32(); + if (validateCrcs && expectedCrc !== 0) { + const actualCrc = crc32(new DataView(recordView.buffer, recordView.byteOffset, crcLength)); + if (actualCrc !== expectedCrc) { + throw new Error( + `Chunk Index CRC32 mismatch: expected ${expectedCrc}, actual ${actualCrc}`, + ); + } + } + const record: TypedMcapRecord = { + type: "ChunkIndex", + startTime, + endTime, + chunkOffset, + messageIndexOffsets, + messageIndexLength, + compression, + compressedSize, + uncompressedSize, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + case Opcode.ATTACHMENT: { + const name = reader.string(); + const recordTime = reader.uint64(); + const contentType = reader.string(); + const dataLen = reader.uint64(); + if (BigInt(recordView.byteOffset + reader.offset) + 
dataLen > Number.MAX_SAFE_INTEGER) { + throw new Error(`Attachment too large: ${dataLen}`); + } + const data = new Uint8Array( + recordView.buffer.slice( + recordView.byteOffset + reader.offset, + recordView.byteOffset + reader.offset + Number(dataLen), + ), + ); + reader.offset += Number(dataLen); + const crcLength = reader.offset; + const expectedCrc = reader.uint32(); + if (validateCrcs && expectedCrc !== 0) { + const actualCrc = crc32( + new DataView(view.buffer, startOffset + 5, crcLength - (startOffset + 5)), + ); + if (actualCrc !== expectedCrc) { + throw new Error( + `Attachment CRC32 mismatch: expected ${expectedCrc}, actual ${actualCrc}`, + ); + } + } + + const record: TypedMcapRecord = { + type: "Attachment", + name, + recordTime, + contentType, + data, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + case Opcode.ATTACHMENT_INDEX: { + const recordTime = reader.uint64(); + const attachmentSize = reader.uint64(); + const name = reader.string(); + const contentType = reader.string(); + const attachmentOffset = reader.uint64(); + + const record: TypedMcapRecord = { + type: "AttachmentIndex", + recordTime, + attachmentSize, + name, + contentType, + offset: attachmentOffset, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + case Opcode.STATISTICS: { + const messageCount = reader.uint64(); + const channelCount = reader.uint32(); + const attachmentCount = reader.uint32(); + const chunkCount = reader.uint32(); + const channelMessageCounts = reader.map( + (r) => r.uint16(), + (r) => r.uint64(), + ); + + const record: TypedMcapRecord = { + type: "Statistics", + messageCount, + channelCount, + attachmentCount, + chunkCount, + channelMessageCounts, + }; + return { record, usedBytes: recordEndOffset - startOffset }; + } + } +} diff --git a/typescript/src/v0/testUtils.ts b/typescript/src/v0/testUtils.ts new file mode 100644 index 0000000000..036ec22df2 --- /dev/null +++ b/typescript/src/v0/testUtils.ts @@ -0,0 +1,67 @@ +import { crc32 } from "@foxglove/crc"; + +import { Opcode } from "./constants"; + +export function uint16LE(n: number): Uint8Array { + const result = new Uint8Array(2); + new DataView(result.buffer).setUint16(0, n, true); + return result; +} + +export function uint32LE(n: number): Uint8Array { + const result = new Uint8Array(4); + new DataView(result.buffer).setUint32(0, n, true); + return result; +} + +export function uint64LE(n: bigint): Uint8Array { + const result = new Uint8Array(8); + new DataView(result.buffer).setBigUint64(0, n, true); + return result; +} + +export function string(str: string): Uint8Array { + const encoded = new TextEncoder().encode(str); + const result = new Uint8Array(4 + encoded.length); + new DataView(result.buffer).setUint32(0, encoded.length, true); + result.set(encoded, 4); + return result; +} + +export function record(type: Opcode, data: number[]): Uint8Array { + const result = new Uint8Array(1 + 8 + data.length); + result[0] = type; + new DataView(result.buffer).setBigUint64(1, BigInt(data.length), true); + result.set(data, 1 + 8); + return result; +} + +export function keyValues( + serializeK: (_: K) => Uint8Array, + serializeV: (_: V) => Uint8Array, + pairs: [K, V][], +): Uint8Array { + const serialized = pairs.flatMap(([key, value]) => [serializeK(key), serializeV(value)]); + const totalLen = serialized.reduce((total, ser) => total + ser.length, 0); + const result = new Uint8Array(4 + totalLen); + new DataView(result.buffer).setUint32(0, totalLen, true); + let offset = 4; + for (const ser of 
serialized) { + result.set(ser, offset); + offset += ser.length; + } + return result; +} + +export function crcSuffix(data: number[]): number[] { + const crc = crc32(Uint8Array.from(data)); + return [...data, ...uint32LE(crc)]; +} + +export async function collect(iterable: AsyncIterable): Promise { + const result: T[] = []; + for await (const item of iterable) { + result.push(item); + } + return result; +} diff --git a/typescript/src/v0/types.ts b/typescript/src/v0/types.ts new file mode 100644 index 0000000000..e05684e9b7 --- /dev/null +++ b/typescript/src/v0/types.ts @@ -0,0 +1,105 @@ +export type McapMagic = { + specVersion: "0"; +}; +export type Header = { + profile: string; + library: string; + metadata: [key: string, value: string][]; +}; +export type Footer = { + indexOffset: bigint; + indexCrc: number; +}; +export type ChannelInfo = { + channelId: number; + topicName: string; + encoding: string; + schemaName: string; + schema: string; + userData: [key: string, value: string][]; +}; +export type Message = { + channelId: number; + sequence: number; + publishTime: bigint; + recordTime: bigint; + messageData: Uint8Array; +}; +export type Chunk = { + uncompressedSize: bigint; + uncompressedCrc: number; + compression: string; + records: Uint8Array; +}; +export type MessageIndex = { + channelId: number; + count: number; + records: [recordTime: bigint, offset: bigint][]; +}; +export type ChunkIndex = { + startTime: bigint; + endTime: bigint; + chunkOffset: bigint; + messageIndexOffsets: Map; + messageIndexLength: bigint; + compression: string; + compressedSize: bigint; + uncompressedSize: bigint; +}; +export type Attachment = { + name: string; + recordTime: bigint; + contentType: string; + data: Uint8Array; +}; +export type AttachmentIndex = { + recordTime: bigint; + attachmentSize: bigint; + name: string; + contentType: string; + offset: bigint; +}; +export type Statistics = { + messageCount: bigint; + channelCount: number; + attachmentCount: number; + chunkCount: number; + channelMessageCounts: Map; +}; +export type UnknownRecord = { + opcode: number; + data: Uint8Array; +}; + +export type McapRecords = { + Header: Header; + Footer: Footer; + ChannelInfo: ChannelInfo; + Message: Message; + Chunk: Chunk; + MessageIndex: MessageIndex; + ChunkIndex: ChunkIndex; + Attachment: Attachment; + AttachmentIndex: AttachmentIndex; + Statistics: Statistics; + Unknown: UnknownRecord; +}; + +export type TypedMcapRecords = { + [R in keyof McapRecords]: McapRecords[R] & { type: R }; +}; + +type Values = T[keyof T]; +export type TypedMcapRecord = Values; +export type McapRecord = Values; + +export interface McapStreamReader { + done(): boolean; + bytesRemaining(): number; + append(data: Uint8Array): void; + nextRecord(): TypedMcapRecord | undefined; +} + +export type DecompressHandlers = { + [compression: string]: (buffer: Uint8Array, decompressedSize: bigint) => Uint8Array; +}; diff --git a/yarn.lock b/yarn.lock index 948deae4c5..de9b77d9dc 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1990,6 +1990,11 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +heap-js@^2.1.6: + version "2.1.6" + resolved "https://registry.yarnpkg.com/heap-js/-/heap-js-2.1.6.tgz#817021e6d252ad6ba9a11ea65988cc4e6207782d" + integrity sha512-xQxyJg7VcgveAZtY0eAu7iCn+2VCqLBkQoz7G4RnOIEsmP82J/LnRdr0I2Mi/pThkP5tviL8yFq5utE3yOPYpQ== + heap@^0.2.6: version "0.2.6" resolved "https://registry.yarnpkg.com/heap/-/heap-0.2.6.tgz#087e1f10b046932fc8594dd9e6d378afc9d1e5ac" From 28620fe3059c39e3b023badafe2f160f06be984a Mon Sep 17 00:00:00 2001 
From: Jacob Bandes-Storch Date: Wed, 19 Jan 2022 13:09:57 -0800 Subject: [PATCH 019/635] typescript: add json and protobuf encoding support to validate script (#25) `validate` script now supports `protobuf` and `json` encodings (similar to [ws-protocol](https://github.com/foxglove/ws-protocol)). --- typescript/scripts/validate.ts | 69 +++++++++++++++--------------- typescript/typings/protobufjs.d.ts | 3 ++ 2 files changed, 38 insertions(+), 34 deletions(-) diff --git a/typescript/scripts/validate.ts b/typescript/scripts/validate.ts index 6a443082a2..537662cd8f 100644 --- a/typescript/scripts/validate.ts +++ b/typescript/scripts/validate.ts @@ -1,4 +1,4 @@ -import { parse as parseMessageDefinition, RosMsgDefinition } from "@foxglove/rosmsg"; +import { parse as parseMessageDefinition } from "@foxglove/rosmsg"; import { LazyMessageReader as ROS1LazyMessageReader } from "@foxglove/rosmsg-serialization"; import { MessageReader as ROS2MessageReader } from "@foxglove/rosmsg2-serialization"; import { program } from "commander"; @@ -6,6 +6,8 @@ import { createReadStream } from "fs"; import fs from "fs/promises"; import { isEqual } from "lodash"; import { performance } from "perf_hooks"; +import protobufjs from "protobufjs"; +import { FileDescriptorSet } from "protobufjs/ext/descriptor"; import decompressLZ4 from "wasm-lz4"; import detectVersion, { @@ -92,8 +94,7 @@ async function validate( number, { info: ChannelInfo; - messageDeserializer?: ROS2MessageReader | ROS1LazyMessageReader; - parsedDefinitions?: RosMsgDefinition[]; + messageDeserializer?: (data: ArrayBufferView) => unknown; } >(); @@ -112,27 +113,38 @@ async function validate( } break; } - let parsedDefinitions; - let messageDeserializer; + let messageDeserializer: (data: ArrayBufferView) => unknown; if (record.encoding === "ros1") { - parsedDefinitions = parseMessageDefinition(record.schema); - messageDeserializer = new ROS1LazyMessageReader(parsedDefinitions); + const reader = new ROS1LazyMessageReader(parseMessageDefinition(record.schema)); + messageDeserializer = (data) => { + const size = reader.size(data); + if (size !== data.byteLength) { + throw new Error(`Message size ${size} should match buffer length ${data.byteLength}`); + } + return reader.readMessage(data).toJSON(); + }; } else if (record.encoding === "ros2") { - parsedDefinitions = parseMessageDefinition(record.schema, { - ros2: true, - }); - messageDeserializer = new ROS2MessageReader(parsedDefinitions); + const reader = new ROS2MessageReader( + parseMessageDefinition(record.schema, { + ros2: true, + }), + ); + messageDeserializer = (data) => reader.readMessage(data); } else if (record.encoding === "protobuf") { - messageDeserializer = undefined; - parsedDefinitions = undefined; + const root = protobufjs.Root.fromDescriptor( + FileDescriptorSet.decode(Buffer.from(record.schema, "base64")), + ); + const type = root.lookupType(record.schemaName); + + messageDeserializer = (data) => + type.decode(new Uint8Array(data.buffer, data.byteOffset, data.byteLength)); + } else if (record.encoding === "json") { + const textDecoder = new TextDecoder(); + messageDeserializer = (data) => JSON.parse(textDecoder.decode(data)); } else { throw new Error(`unsupported encoding ${record.encoding}`); } - channelInfoById.set(record.channelId, { - info: record, - messageDeserializer, - parsedDefinitions, - }); + channelInfoById.set(record.channelId, { info: record, messageDeserializer }); break; } @@ -142,23 +154,12 @@ async function validate( throw new Error(`message for channel 
${record.channelId} with no prior channel info`); } if (deserialize) { - let message: unknown; - if (channelInfo.messageDeserializer instanceof ROS1LazyMessageReader) { - const size = channelInfo.messageDeserializer.size(record.messageData); - if (size !== record.messageData.byteLength) { - throw new Error( - `Message size ${size} should match buffer length ${record.messageData.byteLength}`, - ); - } - message = channelInfo.messageDeserializer.readMessage(record.messageData).toJSON(); - } else { - if (channelInfo.messageDeserializer == undefined) { - throw new Error( - `No deserializer available for channel id: ${channelInfo.info.channelId} ${channelInfo.info.encoding}`, - ); - } - message = channelInfo.messageDeserializer.readMessage(record.messageData); + if (channelInfo.messageDeserializer == undefined) { + throw new Error( + `No deserializer available for channel id: ${channelInfo.info.channelId} ${channelInfo.info.encoding}`, + ); } + const message = channelInfo.messageDeserializer(record.messageData); if (dump) { log(message); } diff --git a/typescript/typings/protobufjs.d.ts b/typescript/typings/protobufjs.d.ts index 426f994394..8c0445835e 100644 --- a/typescript/typings/protobufjs.d.ts +++ b/typescript/typings/protobufjs.d.ts @@ -8,4 +8,7 @@ declare module "protobufjs" { protoVersion: string, ): protobufjs.Message & descriptor.IFileDescriptorSet; } + declare namespace ReflectionObject { + export const fromDescriptor: (desc: protobufjs.Message) => protobufjs.Root; + } } From 808dcef528f7112f94946b617cf204bbf0a29cd2 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Wed, 19 Jan 2022 14:45:44 -0800 Subject: [PATCH 020/635] fix message index order check --- typescript/src/v0/Mcap0IndexedReader.test.ts | 95 ++++++++++++++++++++ typescript/src/v0/Mcap0IndexedReader.ts | 2 +- 2 files changed, 96 insertions(+), 1 deletion(-) diff --git a/typescript/src/v0/Mcap0IndexedReader.test.ts b/typescript/src/v0/Mcap0IndexedReader.test.ts index fe05fed8c7..9553cdaf04 100644 --- a/typescript/src/v0/Mcap0IndexedReader.test.ts +++ b/typescript/src/v0/Mcap0IndexedReader.test.ts @@ -381,4 +381,99 @@ describe("Mcap0IndexedReader", () => { "Overlapping chunks are not currently supported", ); }); + + it.each<{ records: [bigint, bigint][]; shouldThrow: boolean }>([ + { + records: [ + [0n, 0n], + [0n, 0n], + [0n, 0n], + ], + shouldThrow: false, + }, + { + records: [ + [0n, 0n], + [1n, 0n], + [1n, 0n], + ], + shouldThrow: false, + }, + { + records: [ + [0n, 0n], + [2n, 0n], + [1n, 0n], + ], + shouldThrow: true, + }, + ])( + "requires message index offsets to be in order of recordTime", + async ({ records, shouldThrow }) => { + const data = [ + ...MCAP0_MAGIC, + ...record(Opcode.HEADER, [ + ...string(""), // profile + ...string(""), // library + ...keyValues(string, string, []), // metadata + ]), + ]; + const chunkOffset = BigInt(data.length); + data.push( + ...record(Opcode.CHUNK, [ + ...uint64LE(0n), // decompressed size + ...uint32LE(crc32(new Uint8Array([]))), // decompressed crc32 + ...string(""), // compression + ]), + ); + const messageIndexOffset = BigInt(data.length); + data.push( + ...record( + Opcode.MESSAGE_INDEX, + crcSuffix([ + ...uint16LE(42), // channel id + ...uint32LE(1), // count + ...keyValues(uint64LE, uint64LE, records), // records + ]), + ), + ); + const messageIndexLength = BigInt(data.length) - messageIndexOffset; + const indexOffset = BigInt(data.length); + data.push( + ...record( + Opcode.CHUNK_INDEX, + crcSuffix([ + ...uint64LE(0n), // start time + ...uint64LE(100n), // end time 
+ ...uint64LE(chunkOffset), // offset + ...keyValues(uint16LE, uint64LE, [[42, messageIndexOffset]]), // message index offsets + ...uint64LE(messageIndexLength), // message index length + ...string(""), // compression + ...uint64LE(BigInt(0n)), // compressed size + ...uint64LE(BigInt(0n)), // uncompressed size + ]), + ), + ...record(Opcode.FOOTER, [ + ...uint64LE(indexOffset), // index offset + ...uint32LE(crc32(new Uint8Array(0))), // index crc + ]), + ...MCAP0_MAGIC, + ); + const reader = await Mcap0IndexedReader.Initialize({ + readable: makeReadable(new Uint8Array(data)), + }); + if (shouldThrow) { + // eslint-disable-next-line jest/no-conditional-expect + await expect(collect(reader.readMessages())).rejects.toThrow( + /Message index entries for channel 42 .+ must be sorted by recordTime/, + ); + } else { + // Still fails because messages are not actually present in the chunk + // eslint-disable-next-line jest/no-conditional-expect + await expect(collect(reader.readMessages())).rejects.toThrow( + "Unable to parse record at offset", + ); + } + }, + ); }); diff --git a/typescript/src/v0/Mcap0IndexedReader.ts b/typescript/src/v0/Mcap0IndexedReader.ts index 098d56cc66..8d2a7ea68e 100644 --- a/typescript/src/v0/Mcap0IndexedReader.ts +++ b/typescript/src/v0/Mcap0IndexedReader.ts @@ -306,7 +306,7 @@ export default class Mcap0IndexedReader { (channelIds == undefined || channelIds.has(result.record.channelId)) ) { for (let i = 0; i + 1 < result.record.records.length; i++) { - if (result.record.records[i]![0] >= result.record.records[i + 1]![0]) { + if (result.record.records[i]![0] > result.record.records[i + 1]![0]) { throw new Error( `Message index entries for channel ${result.record.channelId} in chunk at offset ${chunkIndex.chunkOffset} must be sorted by recordTime`, ); From 77cd69f2dd6fa5ea664a50b55119192c3cd84a01 Mon Sep 17 00:00:00 2001 From: John Hurliman Date: Thu, 20 Jan 2022 10:05:07 -0800 Subject: [PATCH 021/635] Skeleton C++ project (#26) --- .github/workflows/ci.yml | 10 ++ .vscode/extensions.json | 4 +- .vscode/settings.json | 6 +- cpp/.clang-format | 30 +++++ cpp/.dockerignore | 10 ++ cpp/.gitignore | 9 ++ cpp/Makefile | 17 +++ cpp/README.md | 6 + cpp/dev.Dockerfile | 51 +++++++++ cpp/docker-compose.yml | 18 +++ cpp/examples/CMakeLists.txt | 8 ++ cpp/examples/bag2mcap.cpp | 15 +++ cpp/examples/conanfile.py | 12 ++ cpp/mcap/LICENSE | 202 +++++++++++++++++++++++++++++++++ cpp/mcap/conanfile.py | 28 +++++ cpp/mcap/include/mcap/mcap.hpp | 111 ++++++++++++++++++ cpp/scripts/format.py | 64 +++++++++++ 17 files changed, 599 insertions(+), 2 deletions(-) create mode 100644 cpp/.clang-format create mode 100644 cpp/.dockerignore create mode 100644 cpp/.gitignore create mode 100644 cpp/Makefile create mode 100644 cpp/README.md create mode 100644 cpp/dev.Dockerfile create mode 100644 cpp/docker-compose.yml create mode 100644 cpp/examples/CMakeLists.txt create mode 100644 cpp/examples/bag2mcap.cpp create mode 100644 cpp/examples/conanfile.py create mode 100644 cpp/mcap/LICENSE create mode 100644 cpp/mcap/conanfile.py create mode 100644 cpp/mcap/include/mcap/mcap.hpp create mode 100644 cpp/scripts/format.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ddbcc7e34f..5ffac5dd20 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,6 +19,16 @@ jobs: - run: yarn install --frozen-lockfile - run: yarn lint:docs + cpp: + runs-on: ubuntu-latest + defaults: + run: + working-directory: cpp + steps: + - uses: actions/checkout@v2 + - run: make format-check + - 
run: make build + typescript: runs-on: ubuntu-latest steps: diff --git a/.vscode/extensions.json b/.vscode/extensions.json index ea892e04c1..541c99ba98 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -2,6 +2,8 @@ "recommendations": [ "dbaeumer.vscode-eslint", "esbenp.prettier-vscode", - "orta.vscode-jest" + "orta.vscode-jest", + "ms-vscode.cpptools-extension-pack", + "ms-vscode-remote.vscode-remote-extensionpack" ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index e214c29483..8ffb8f0785 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,10 +1,14 @@ { "editor.formatOnSave": true, + "typescript.tsdk": "node_modules/typescript/lib", "prettier.prettierPath": "./node_modules/prettier", "eslint.packageManager": "yarn", "eslint.options": { "reportUnusedDisableDirectives": true }, - "jest.jestCommandLine": "yarn workspace @foxglove/mcap test" + "jest.jestCommandLine": "yarn workspace @foxglove/mcap test", + + // https://github.com/microsoft/vscode-cpptools/issues/722 + "C_Cpp.autoAddFileAssociations": false } diff --git a/cpp/.clang-format b/cpp/.clang-format new file mode 100644 index 0000000000..7a57d1f2d8 --- /dev/null +++ b/cpp/.clang-format @@ -0,0 +1,30 @@ +--- +Language: Cpp +Standard: c++17 +BasedOnStyle: Google + +AllowShortFunctionsOnASingleLine: Empty +AllowShortLambdasOnASingleLine: Empty +AccessModifierOffset: -2 +TabWidth: 2 +ContinuationIndentWidth: 2 +UseTab: Never +BreakConstructorInitializers: BeforeComma +ColumnLimit: 100 +ConstructorInitializerAllOnOneLineOrOnePerLine: false +DerivePointerAlignment: false +FixNamespaceComments: true +PointerAlignment: Left +ReflowComments: true +SortIncludes: true + +IncludeCategories: + - Regex: "^> /etc/apt/sources.list && \ + curl https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - &&\ + apt-get update && \ + apt-get install -y --no-install-recommends --no-install-suggests \ + clang-13 \ + clang-format-13 + +RUN update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-13 100 +RUN update-alternatives --install /usr/bin/git-clang-format git-clang-format /usr/bin/git-clang-format-13 100 + +ENV CC=clang-13 +ENV CXX=clang++-13 + +WORKDIR /src + +FROM base as build +RUN pip --no-cache-dir install conan + +ENV CONAN_V2_MODE=1 +RUN conan config init +RUN conan profile update settings.compiler.cppstd=17 default + +FROM build as build_bag2mcap +COPY ./examples /src/examples/ +COPY ./mcap /src/mcap/ +COPY ./.clang-format /src/ +RUN conan editable add ./mcap mcap/0.0.1 +RUN conan install examples --install-folder examples/build --build=zlib --build=zstd + +FROM build_bag2mcap AS bag2mcap +COPY --from=build_bag2mcap /src /src +COPY --from=build_bag2mcap /src/examples/build/ /src/examples/build/ +RUN conan build examples --build-folder examples/build +ENTRYPOINT ["examples/build/bin/bag2mcap"] diff --git a/cpp/docker-compose.yml b/cpp/docker-compose.yml new file mode 100644 index 0000000000..c93f493d5e --- /dev/null +++ b/cpp/docker-compose.yml @@ -0,0 +1,18 @@ +services: + base: + build: + context: . + dockerfile: dev.Dockerfile + target: base + + build: + build: + context: . + dockerfile: dev.Dockerfile + target: build + + bag2mcap: + build: + context: . 
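+      # builds the bag2mcap stage of dev.Dockerfile, which compiles the
+      # examples and sets the resulting binary as the container entrypoint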
+ dockerfile: dev.Dockerfile + target: bag2mcap diff --git a/cpp/examples/CMakeLists.txt b/cpp/examples/CMakeLists.txt new file mode 100644 index 0000000000..f652dc6a27 --- /dev/null +++ b/cpp/examples/CMakeLists.txt @@ -0,0 +1,8 @@ +cmake_minimum_required(VERSION 3.1) +project(McapExamples CXX) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) +conan_basic_setup() + +add_executable(bag2mcap bag2mcap.cpp) +target_link_libraries(bag2mcap ${CONAN_LIBS}) diff --git a/cpp/examples/bag2mcap.cpp b/cpp/examples/bag2mcap.cpp new file mode 100644 index 0000000000..a2185f5d00 --- /dev/null +++ b/cpp/examples/bag2mcap.cpp @@ -0,0 +1,15 @@ +#include + +#include + +int main() { + mcap::Message msg; + msg.channelId = 1; + msg.sequence = 2; + msg.publishTime = 3; + msg.recordTime = 4; + msg.data.push_back(5); + + std::cout << "msg.channelId = " << msg.channelId << "\n"; + return 0; +} diff --git a/cpp/examples/conanfile.py b/cpp/examples/conanfile.py new file mode 100644 index 0000000000..acb3541234 --- /dev/null +++ b/cpp/examples/conanfile.py @@ -0,0 +1,12 @@ +from conans import ConanFile, CMake + + +class McapExamplesConan(ConanFile): + settings = "os", "compiler", "build_type", "arch" + generators = "cmake" + requires = "mcap/0.0.1" + + def build(self): + cmake = CMake(self) + cmake.configure() + cmake.build() diff --git a/cpp/mcap/LICENSE b/cpp/mcap/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/cpp/mcap/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
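The conan recipe below declares the package header-only (see `package_id`), so consuming the skeleton library is a single include. A minimal sketch of hypothetical consumer code, using the plain structs from `mcap/mcap.hpp` as added below (there is no writer API yet at this commit):

```cpp
#include <mcap/mcap.hpp>

#include <iostream>

int main() {
  // ChannelInfo and Message are plain aggregates at this stage.
  mcap::ChannelInfo info{};
  info.channelId = 1;
  info.topicName = "/chatter";
  info.encoding = "ros1";
  info.schemaName = "std_msgs/String";

  mcap::Message msg{};
  msg.channelId = info.channelId;
  msg.data = {std::byte{0x05}};  // mcap::ByteArray is std::vector<std::byte>

  std::cout << "channel " << msg.channelId << " carries " << msg.data.size() << " byte(s)\n";
  return 0;
}
```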
diff --git a/cpp/mcap/conanfile.py b/cpp/mcap/conanfile.py new file mode 100644 index 0000000000..cf930f511c --- /dev/null +++ b/cpp/mcap/conanfile.py @@ -0,0 +1,28 @@ +from conans import ConanFile, tools + + +class McapConan(ConanFile): + name = "mcap" + version = "0.0.1" + url = "https://github.com/foxglove/mcap" + homepage = "https://github.com/foxglove/mcap" + description = "A C++ implementation of the MCAP file format" + license = "MIT" + topics = ("mcap", "serialization", "deserialization", "recording") + + settings = ("os", "compiler", "build_type", "arch") + requires = ("zlib/1.2.11", "zstd/1.5.1") + generators = "cmake" + + def validate(self): + tools.check_min_cppstd(self, "17") + + def configure(self): + pass + + def package(self): + self.copy(pattern="LICENSE", dst="licenses") + self.copy("include/*") + + def package_id(self): + self.info.header_only() diff --git a/cpp/mcap/include/mcap/mcap.hpp b/cpp/mcap/include/mcap/mcap.hpp new file mode 100644 index 0000000000..bf52e25dfa --- /dev/null +++ b/cpp/mcap/include/mcap/mcap.hpp @@ -0,0 +1,111 @@ +#pragma once + +#include +#include +#include + +namespace mcap { + +constexpr char SpecVersionChar = '0'; + +using ChannelId = uint16_t; +using Timestamp = uint64_t; +using ByteOffset = uint64_t; +using KeyValueMap = std::unordered_map; +using ByteArray = std::vector; + +enum struct OpCode : uint8_t { + Header = 0x01, + Footer = 0x02, + ChannelInfo = 0x03, + Message = 0x04, + Chunk = 0x05, + MessageIndex = 0x06, + ChunkIndex = 0x07, + Attachment = 0x08, + AttachmentIndex = 0x09, + Statistics = 0x0A, +}; + +struct Header { + std::string profile; + std::string library; + mcap::KeyValueMap metadata; +}; + +struct Footer { + mcap::ByteOffset indexOffset; + uint32_t indexCrc; +}; + +struct ChannelInfo { + mcap::ChannelId channelId; + std::string topicName; + std::string encoding; + std::string schemaName; + std::string schema; + mcap::KeyValueMap userData; +}; + +struct Message { + mcap::ChannelId channelId; + uint32_t sequence; + mcap::Timestamp publishTime; + mcap::Timestamp recordTime; + mcap::ByteArray data; +}; + +struct Chunk { + uint64_t uncompressedSize; + uint32_t uncompressedCrc; + std::string compression; + mcap::ByteArray records; +}; + +struct MessageIndex { + mcap::ChannelId channelId; + uint32_t count; + std::unordered_map records; +}; + +struct ChunkIndex { + mcap::Timestamp startTime; + mcap::Timestamp endTime; + mcap::ByteOffset chunkOffset; + std::unordered_map messageIndexOffsets; + uint64_t messageIndexLength; + std::string compression; + uint64_t compressedSize; + uint64_t uncompressedSized; + uint32_t crc; +}; + +struct Attachment { + std::string name; + mcap::Timestamp recordTime; + std::string contentType; + mcap::ByteArray data; +}; + +struct AttachmentIndex { + mcap::Timestamp recordTime; + uint64_t attachmentSize; + std::string name; + std::string contentType; + mcap::ByteOffset offset; +}; + +struct Statistics { + uint64_t messageCount; + uint32_t channelCount; + uint32_t attachmentCount; + uint32_t chunkCount; + std::unordered_map channelMessageCounts; +}; + +struct UnknownRecord { + uint8_t opcode; + mcap::ByteArray data; +}; + +} // namespace mcap diff --git a/cpp/scripts/format.py b/cpp/scripts/format.py new file mode 100644 index 0000000000..8330b1a9c1 --- /dev/null +++ b/cpp/scripts/format.py @@ -0,0 +1,64 @@ +import argparse +import difflib +import os +import subprocess +import sys +from typing import List + +IGNORE_DIRS = ["build"] +EXTENSIONS = [".cpp", ".hpp"] + + +def main(dirs: List[str], fix: bool): 
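+    """Check or fix clang-format style for C++ sources under ``dirs``.
+
+    With --fix, rewrites files in place; otherwise prints a unified diff for
+    every file that differs and returns 1 so CI fails on unformatted code.
+    """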
+ changed_paths: List[str] = [] + for root in dirs: + for dirpath, dirnames, filenames in os.walk(root): + # Filter out directories to skip + dirnames[:] = filter(lambda d: d not in IGNORE_DIRS, dirnames) + + for name in filenames: + path = os.path.join(dirpath, name) + if any(name.endswith(ext) for ext in EXTENSIONS): + if fix: + subprocess.check_call(["clang-format", "-i", path]) + continue + + stdout = ( + subprocess.check_output(["clang-format", path]) + .decode("utf-8") + .splitlines() + ) + + with open(path, "r") as f: + orig = [line.rstrip("\n") for line in f] + diff = difflib.unified_diff( + orig, + stdout, + fromfile=path, + tofile=f"clang-format {path}", + lineterm="", + ) + had_diff = False + for line in diff: + had_diff = True + print(line) + if had_diff: + changed_paths.append(path) + print("\n") + + if changed_paths: + print(f"{len(changed_paths)} files need to be formatted:") + for path in changed_paths: + print(f" {path}") + return 1 + return 0 + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Run clang-format and display changed files." + ) + parser.add_argument("dirs", help="List of directories to search", nargs="+") + parser.add_argument("--fix", action="store_true") + args = parser.parse_args() + sys.exit(main(**vars(args))) From caf903fb4e49e7863a0477cb6ad983995e997da1 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Thu, 20 Jan 2022 13:19:34 -0800 Subject: [PATCH 022/635] Fix validate script to process channel info before readMessages() on indexed files (#29) Fixes validate on indexed files. --- typescript/scripts/validate.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/typescript/scripts/validate.ts b/typescript/scripts/validate.ts index 537662cd8f..34c6cb03c8 100644 --- a/typescript/scripts/validate.ts +++ b/typescript/scripts/validate.ts @@ -237,6 +237,9 @@ async function validate( }, decompressHandlers, }); + for (const channelInfo of reader.channelInfosById.values()) { + processRecord(channelInfo); + } for await (const message of reader.readMessages()) { processRecord(message); } From c5a8e4717867d07f4c667f74adcffacf58859000 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Thu, 20 Jan 2022 14:32:19 -0800 Subject: [PATCH 023/635] typescript: Keep references to attachment indexes and statistics in indexed reader, export v0 types (#30) --- typescript/src/index.ts | 6 +++++- typescript/src/v0/Mcap0IndexedReader.ts | 10 ++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/typescript/src/index.ts b/typescript/src/index.ts index 68879c2d0d..640b8b2d4c 100644 --- a/typescript/src/index.ts +++ b/typescript/src/index.ts @@ -1,3 +1,7 @@ export { default as McapPre0Reader } from "./pre0/McapPre0Reader"; -export * as Pre0Types from "./pre0/types"; export { default as McapPre0Writer } from "./pre0/McapPre0Writer"; +export * as McapPre0Types from "./pre0/types"; + +export { default as Mcap0IndexedReader } from "./v0/Mcap0IndexedReader"; +export { default as Mcap0StreamReader } from "./v0/Mcap0StreamReader"; +export * as Mcap0Types from "./v0/types"; diff --git a/typescript/src/v0/Mcap0IndexedReader.ts b/typescript/src/v0/Mcap0IndexedReader.ts index 8d2a7ea68e..5badf19636 100644 --- a/typescript/src/v0/Mcap0IndexedReader.ts +++ b/typescript/src/v0/Mcap0IndexedReader.ts @@ -9,7 +9,9 @@ import { DecompressHandlers, TypedMcapRecords } from "./types"; export default class Mcap0IndexedReader { readonly chunkIndexes: readonly TypedMcapRecords["ChunkIndex"][]; + readonly attachmentIndexes: readonly 
TypedMcapRecords["AttachmentIndex"][]; readonly channelInfosById: ReadonlyMap; + readonly statistics: TypedMcapRecords["Statistics"] | undefined; private readable: IReadable; private decompressHandlers?: DecompressHandlers; @@ -21,16 +23,22 @@ export default class Mcap0IndexedReader { private constructor({ readable, chunkIndexes, + attachmentIndexes, + statistics, decompressHandlers, channelInfosById, }: { readable: IReadable; chunkIndexes: readonly TypedMcapRecords["ChunkIndex"][]; + attachmentIndexes: readonly TypedMcapRecords["AttachmentIndex"][]; + statistics: TypedMcapRecords["Statistics"] | undefined; decompressHandlers?: DecompressHandlers; channelInfosById: Map; }) { this.readable = readable; this.chunkIndexes = chunkIndexes; + this.attachmentIndexes = attachmentIndexes; + this.statistics = statistics; this.decompressHandlers = decompressHandlers; this.channelInfosById = channelInfosById; this.readwriteChannelInfosById = channelInfosById; @@ -174,6 +182,8 @@ export default class Mcap0IndexedReader { return new Mcap0IndexedReader({ readable, chunkIndexes, + attachmentIndexes, + statistics, decompressHandlers, channelInfosById, }); From e118d476805aeedd21a62f6912c01198351132f8 Mon Sep 17 00:00:00 2001 From: John Hurliman Date: Thu, 20 Jan 2022 14:35:21 -0800 Subject: [PATCH 024/635] C++ library can write unindexed files (#28) --- .gitignore | 1 + .vscode/settings.json | 3 +- cpp/dev.Dockerfile | 6 +- cpp/examples/bag2mcap.cpp | 41 ++++++-- cpp/mcap/include/mcap/errors.hpp | 69 +++++++++++++ cpp/mcap/include/mcap/mcap.hpp | 98 +++++++++++++++++- cpp/mcap/include/mcap/mcap.inl | 168 +++++++++++++++++++++++++++++++ 7 files changed, 372 insertions(+), 14 deletions(-) create mode 100644 cpp/mcap/include/mcap/errors.hpp create mode 100644 cpp/mcap/include/mcap/mcap.inl diff --git a/.gitignore b/.gitignore index 3c3629e647..fd4f2b066b 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ node_modules +.DS_Store diff --git a/.vscode/settings.json b/.vscode/settings.json index 8ffb8f0785..5dd9445463 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -10,5 +10,6 @@ "jest.jestCommandLine": "yarn workspace @foxglove/mcap test", // https://github.com/microsoft/vscode-cpptools/issues/722 - "C_Cpp.autoAddFileAssociations": false + "C_Cpp.autoAddFileAssociations": false, + "C_Cpp.default.cppStandard": "c++17" } diff --git a/cpp/dev.Dockerfile b/cpp/dev.Dockerfile index eca8c56d44..e40b90de63 100644 --- a/cpp/dev.Dockerfile +++ b/cpp/dev.Dockerfile @@ -20,7 +20,7 @@ RUN echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-13 main" >> /etc/a apt-get update && \ apt-get install -y --no-install-recommends --no-install-suggests \ clang-13 \ - clang-format-13 + clang-format-13 RUN update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-13 100 RUN update-alternatives --install /usr/bin/git-clang-format git-clang-format /usr/bin/git-clang-format-13 100 @@ -35,14 +35,14 @@ RUN pip --no-cache-dir install conan ENV CONAN_V2_MODE=1 RUN conan config init -RUN conan profile update settings.compiler.cppstd=17 default FROM build as build_bag2mcap COPY ./examples /src/examples/ COPY ./mcap /src/mcap/ COPY ./.clang-format /src/ RUN conan editable add ./mcap mcap/0.0.1 -RUN conan install examples --install-folder examples/build --build=zlib --build=zstd +RUN conan install examples --install-folder examples/build \ + -s compiler.cppstd=17 --build=zlib --build=zstd FROM build_bag2mcap AS bag2mcap COPY --from=build_bag2mcap /src /src diff --git 
a/cpp/examples/bag2mcap.cpp b/cpp/examples/bag2mcap.cpp index a2185f5d00..fac79adbbe 100644 --- a/cpp/examples/bag2mcap.cpp +++ b/cpp/examples/bag2mcap.cpp @@ -1,15 +1,42 @@ #include -#include +#include +#include +#include +#include + +constexpr char StringSchema[] = "string data"; + +mcap::Timestamp now() { + const auto timestamp = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()); + return mcap::Timestamp(timestamp.count()); +} int main() { + mcap::McapWriter writer; + + std::ofstream out("output.mcap", std::ios::binary); + writer.open(out, mcap::McapWriterOptions("ros1")); + + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + mcap::Message msg; - msg.channelId = 1; - msg.sequence = 2; - msg.publishTime = 3; - msg.recordTime = 4; - msg.data.push_back(5); + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = now(); + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + writer.write(msg); + writer.close(); - std::cout << "msg.channelId = " << msg.channelId << "\n"; return 0; } diff --git a/cpp/mcap/include/mcap/errors.hpp b/cpp/mcap/include/mcap/errors.hpp new file mode 100644 index 0000000000..74c8eef436 --- /dev/null +++ b/cpp/mcap/include/mcap/errors.hpp @@ -0,0 +1,69 @@ +#pragma once + +#include +#include + +namespace mcap { + +enum class ErrorCode { + Success = 0, + NotOpen = 1, + InvalidChannelId = 2, +}; + +} // namespace mcap + +namespace std { +// Register mcap::ErrorCode with the standard error code system +template <> +struct is_error_code_enum : true_type {}; +} // namespace std + +namespace mcap { + +namespace detail { + +// Define a custom error code category derived from std::error_category +class McapErrorCategory : public std::error_category { +public: + virtual const char* name() const noexcept override final { + return "McapError"; + } + + virtual std::string message(int c) const override final { + switch (static_cast(c)) { + case ErrorCode::Success: + return "success"; + case ErrorCode::NotOpen: + return "not open"; + case ErrorCode::InvalidChannelId: + return "invalid channel id"; + default: + return "unknown"; + } + } + + virtual std::error_condition default_error_condition(int c) const noexcept override final { + switch (static_cast(c)) { + case ErrorCode::NotOpen: + return make_error_condition(std::errc::bad_file_descriptor); + case ErrorCode::InvalidChannelId: + return make_error_condition(std::errc::invalid_argument); + default: + return std::error_condition(c, *this); + } + } +}; + +} // namespace detail + +const detail::McapErrorCategory& McapErrorCategory() { + static detail::McapErrorCategory c; + return c; +} + +inline std::error_code make_error_code(ErrorCode e) { + return {int(e), McapErrorCategory()}; +} + +} // namespace mcap diff --git a/cpp/mcap/include/mcap/mcap.hpp b/cpp/mcap/include/mcap/mcap.hpp index bf52e25dfa..1fce1c2913 100644 --- a/cpp/mcap/include/mcap/mcap.hpp +++ b/cpp/mcap/include/mcap/mcap.hpp @@ -1,12 +1,21 @@ #pragma once +#include "errors.hpp" +#include +#include #include +#include #include +#include #include namespace mcap { -constexpr char SpecVersionChar = '0'; +#define LIBRARY_VERSION "0.0.1" + +constexpr char SpecVersion = '0'; +constexpr char LibraryVersion[] = LIBRARY_VERSION; +constexpr char Magic[] = {char(137), 77, 67, 
65, 80, SpecVersion, 13, 10}; // "\x89MCAP0\r\n" using ChannelId = uint16_t; using Timestamp = uint64_t; @@ -45,6 +54,13 @@ struct ChannelInfo { std::string schemaName; std::string schema; mcap::KeyValueMap userData; + + ChannelInfo(const std::string_view topicName, const std::string_view encoding, + const std::string_view schemaName, const std::string_view schema) + : topicName(topicName) + , encoding(encoding) + , schemaName(schemaName) + , schema(schema) {} }; struct Message { @@ -52,7 +68,8 @@ struct Message { uint32_t sequence; mcap::Timestamp publishTime; mcap::Timestamp recordTime; - mcap::ByteArray data; + uint64_t dataSize; + std::byte* data = nullptr; }; struct Chunk { @@ -84,7 +101,8 @@ struct Attachment { std::string name; mcap::Timestamp recordTime; std::string contentType; - mcap::ByteArray data; + uint64_t dataSize; + std::byte* data = nullptr; }; struct AttachmentIndex { @@ -108,4 +126,78 @@ struct UnknownRecord { mcap::ByteArray data; }; +struct McapWriterOptions { + bool indexed; + std::string profile; + std::string library; + mcap::KeyValueMap metadata; + + McapWriterOptions(const std::string_view profile) + : indexed(false) + , profile(profile) + , library("libmcap " LIBRARY_VERSION) {} +}; + +class McapWriter final { +public: + ~McapWriter(); + + /** + * @brief Open a new MCAP file for writing and write the header. + * + * @param stream Output stream to write to. + */ + void open(std::ostream& stream, const McapWriterOptions& options); + + /** + * @brief Write the MCAP footer and close the output stream. + */ + void close(); + + /** + * @brief Add channel info and set `info.channelId` to a generated channel id. + * The channel id is used when adding messages. + * + * @param info Description of the channel to register. The `channelId` value + * is ignored and will be set to a generated channel id. + */ + void addChannel(mcap::ChannelInfo& info); + + /** + * @brief Write a message to the output stream. + * + * @param msg Message to add. + * @return A non-zero error code on failure. + */ + std::error_code write(const mcap::Message& message); + + /** + * @brief Write an attachment to the output stream. + * + * @param attachment Attachment to add. + * @return A non-zero error code on failure. 
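+   *
+   * The record is written immediately at the current position in the output
+   * stream; this writer produces unindexed files, so no AttachmentIndex record
+   * is emitted.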
+ */ + std::error_code write(const mcap::Attachment& attachment); + +private: + std::ostream* stream_ = nullptr; + std::vector channels_; + std::unordered_set writtenChannels_; + + void writeMagic(); + + void write(const mcap::Header& header); + void write(const mcap::Footer& footer); + void write(const mcap::ChannelInfo& info); + void write(const std::string_view str); + void write(OpCode value); + void write(uint16_t value); + void write(uint32_t value); + void write(uint64_t value); + void write(std::byte* data, uint64_t size); + void write(const KeyValueMap& map, uint32_t size = 0); +}; + } // namespace mcap + +#include "mcap.inl" diff --git a/cpp/mcap/include/mcap/mcap.inl b/cpp/mcap/include/mcap/mcap.inl new file mode 100644 index 0000000000..fe320db32f --- /dev/null +++ b/cpp/mcap/include/mcap/mcap.inl @@ -0,0 +1,168 @@ +// Do not compile on systems with non-8-bit bytes +static_assert(std::numeric_limits::digits == 8); + +namespace mcap { + +// Public API ////////////////////////////////////////////////////////////////// + +McapWriter::~McapWriter() { + close(); +} + +void McapWriter::open(std::ostream& stream, const McapWriterOptions& options) { + stream_ = &stream; + writeMagic(); + write(Header{options.profile, options.library, options.metadata}); +} + +void McapWriter::close() { + if (!stream_) { + return; + } + write(mcap::Footer{0, 0}); + writeMagic(); + stream_->flush(); + stream_ = nullptr; +} + +void McapWriter::addChannel(mcap::ChannelInfo& info) { + info.channelId = uint16_t(channels_.size() + 1); + channels_.push_back(info); +} + +std::error_code McapWriter::write(const mcap::Message& message) { + if (!stream_) { + return make_error_code(ErrorCode::NotOpen); + } + + // Write out channel info if we have not yet done so + if (writtenChannels_.find(message.channelId) == writtenChannels_.end()) { + const size_t index = message.channelId - 1; + if (index >= channels_.size()) { + return make_error_code(ErrorCode::InvalidChannelId); + } + + write(channels_[index]); + writtenChannels_.insert(message.channelId); + } + + const uint64_t recordSize = 2 + 4 + 8 + 8 + message.dataSize; + + write(OpCode::Message); + write(recordSize); + write(message.channelId); + write(message.sequence); + write(message.publishTime); + write(message.recordTime); + write(message.data, message.dataSize); + + return ErrorCode::Success; +} + +std::error_code McapWriter::write(const mcap::Attachment& attachment) { + if (!stream_) { + return make_error_code(ErrorCode::NotOpen); + } + + const uint64_t recordSize = + 4 + attachment.name.size() + 8 + 4 + attachment.contentType.size() + attachment.dataSize; + + write(OpCode::Attachment); + write(recordSize); + write(attachment.name); + write(attachment.recordTime); + write(attachment.contentType); + write(attachment.data, attachment.dataSize); + + return ErrorCode::Success; +} + +// Private methods ///////////////////////////////////////////////////////////// + +namespace internal { + +uint32_t KeyValueMapSize(const KeyValueMap& map) { + uint32_t size = 0; + for (const auto& [key, value] : map) { + size += 4 + key.size() + 4 + value.size(); + } + return size; +} + +} // namespace internal + +void McapWriter::writeMagic() { + stream_->write(Magic, sizeof(Magic)); +} + +void McapWriter::write(const mcap::Header& header) { + const uint32_t metadataSize = internal::KeyValueMapSize(header.metadata); + const uint64_t recordSize = + 4 + header.profile.size() + 4 + header.library.size() + 4 + metadataSize; + + write(OpCode::Header); + write(recordSize); + 
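+  // Header payload: length-prefixed profile and library strings, followed by
+  // the metadata map (a uint32 byte size, then key/value pairs).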
write(header.profile); + write(header.library); + write(header.metadata, metadataSize); +} + +void McapWriter::write(const mcap::Footer& footer) { + write(OpCode::Footer); + write(uint64_t(12)); + write(footer.indexOffset); + write(footer.indexCrc); +} + +void McapWriter::write(const mcap::ChannelInfo& info) { + const uint32_t userDataSize = internal::KeyValueMapSize(info.userData); + const uint64_t recordSize = 2 + 4 + info.topicName.size() + 4 + info.encoding.size() + 4 + + info.schemaName.size() + 4 + info.schema.size() + 4 + userDataSize + + 4; + const uint32_t crc = 0; + + write(OpCode::ChannelInfo); + write(recordSize); + write(info.channelId); + write(info.topicName); + write(info.encoding); + write(info.schemaName); + write(info.schema); + write(info.userData, userDataSize); + write(crc); +} + +void McapWriter::write(const std::string_view str) { + write(uint32_t(str.size())); + stream_->write(str.data(), str.size()); +} + +void McapWriter::write(OpCode value) { + stream_->write(reinterpret_cast(&value), sizeof(value)); +} + +void McapWriter::write(uint16_t value) { + stream_->write(reinterpret_cast(&value), sizeof(value)); +} + +void McapWriter::write(uint32_t value) { + stream_->write(reinterpret_cast(&value), sizeof(value)); +} + +void McapWriter::write(uint64_t value) { + stream_->write(reinterpret_cast(&value), sizeof(value)); +} + +void McapWriter::write(std::byte* data, uint64_t size) { + stream_->write(reinterpret_cast(data), size); +} + +void McapWriter::write(const KeyValueMap& map, uint32_t size) { + write(size > 0 ? size : internal::KeyValueMapSize(map)); + for (const auto& [key, value] : map) { + write(key); + write(value); + } +} + +} // namespace mcap From af7ea3eb20fde57b248b32b6028ef3ea5727db58 Mon Sep 17 00:00:00 2001 From: Jacob Bandes-Storch Date: Thu, 20 Jan 2022 14:43:41 -0800 Subject: [PATCH 025/635] typescript: Export detectVersion and McapPre0To0StreamReader (#31) --- typescript/scripts/validate.ts | 23 ++++++++++++----------- typescript/src/common/detectVersion.ts | 2 +- typescript/src/index.ts | 3 +++ 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/typescript/scripts/validate.ts b/typescript/scripts/validate.ts index 34c6cb03c8..d2e70cc153 100644 --- a/typescript/scripts/validate.ts +++ b/typescript/scripts/validate.ts @@ -10,19 +10,20 @@ import protobufjs from "protobufjs"; import { FileDescriptorSet } from "protobufjs/ext/descriptor"; import decompressLZ4 from "wasm-lz4"; -import detectVersion, { +import { + detectVersion, DETECT_VERSION_BYTES_REQUIRED, McapVersion, -} from "../src/common/detectVersion"; -import McapPre0To0StreamReader from "../src/pre0/McapPre0To0StreamReader"; -import Mcap0IndexedReader from "../src/v0/Mcap0IndexedReader"; -import Mcap0StreamReader from "../src/v0/Mcap0StreamReader"; -import { - ChannelInfo, - DecompressHandlers, - McapStreamReader, - TypedMcapRecord, -} from "../src/v0/types"; + McapPre0To0StreamReader, + Mcap0IndexedReader, + Mcap0StreamReader, + Mcap0Types, +} from "../src"; + +type ChannelInfo = Mcap0Types.ChannelInfo; +type DecompressHandlers = Mcap0Types.DecompressHandlers; +type McapStreamReader = Mcap0Types.McapStreamReader; +type TypedMcapRecord = Mcap0Types.TypedMcapRecord; function log(...data: unknown[]) { console.log(...data); diff --git a/typescript/src/common/detectVersion.ts b/typescript/src/common/detectVersion.ts index a8b4244868..867e227d0f 100644 --- a/typescript/src/common/detectVersion.ts +++ b/typescript/src/common/detectVersion.ts @@ -9,7 +9,7 @@ export const 
DETECT_VERSION_BYTES_REQUIRED = 8; * Detect MCAP version from file prefix. At least `DETECT_VERSION_BYTES_REQUIRED` bytes must be * provided for the version to be detectable. */ -export default function detectVersion(prefix: DataView): McapVersion | undefined { +export function detectVersion(prefix: DataView): McapVersion | undefined { if (prefix.byteLength < DETECT_VERSION_BYTES_REQUIRED) { return undefined; } diff --git a/typescript/src/index.ts b/typescript/src/index.ts index 640b8b2d4c..208cc3c19c 100644 --- a/typescript/src/index.ts +++ b/typescript/src/index.ts @@ -1,7 +1,10 @@ export { default as McapPre0Reader } from "./pre0/McapPre0Reader"; export { default as McapPre0Writer } from "./pre0/McapPre0Writer"; +export { default as McapPre0To0StreamReader } from "./pre0/McapPre0To0StreamReader"; export * as McapPre0Types from "./pre0/types"; export { default as Mcap0IndexedReader } from "./v0/Mcap0IndexedReader"; export { default as Mcap0StreamReader } from "./v0/Mcap0StreamReader"; export * as Mcap0Types from "./v0/types"; + +export * from "./common/detectVersion"; From bf1b1501a6c40a7c0780f13cd6f3e07d443c6f84 Mon Sep 17 00:00:00 2001 From: Roman Shtylman Date: Fri, 21 Jan 2022 22:53:07 -0800 Subject: [PATCH 026/635] Add Mcap0IndexedWriter (#42) Co-authored-by: Jacob Bandes-Storch --- typescript/scripts/bag2proto.ts | 22 ++- typescript/scripts/validate.ts | 6 +- typescript/src/common/IWritable.ts | 4 + .../BufferBuilder.ts} | 62 +++--- typescript/src/pre0/McapPre0Writer.ts | 23 +-- typescript/src/v0/BufferBuilder.ts | 150 +++++++++++++++ typescript/src/v0/ChunkBuilder.ts | 64 ++++++ typescript/src/v0/Mcap0IndexedReader.test.ts | 4 +- typescript/src/v0/Mcap0IndexedReader.ts | 10 +- typescript/src/v0/Mcap0IndexedWriter.ts | 146 ++++++++++++++ typescript/src/v0/Mcap0RecordBuilder.test.ts | 106 ++++++++++ typescript/src/v0/Mcap0RecordBuilder.ts | 182 ++++++++++++++++++ typescript/src/v0/Mcap0RecordWriter.test.ts | 104 ---------- typescript/src/v0/Mcap0RecordWriter.ts | 116 ----------- typescript/src/v0/Mcap0UnindexedWriter.ts | 60 +++--- typescript/src/v0/index.ts | 3 +- 16 files changed, 760 insertions(+), 302 deletions(-) rename typescript/src/{common/BufferedWriter.ts => pre0/BufferBuilder.ts} (50%) create mode 100644 typescript/src/v0/BufferBuilder.ts create mode 100644 typescript/src/v0/ChunkBuilder.ts create mode 100644 typescript/src/v0/Mcap0IndexedWriter.ts create mode 100644 typescript/src/v0/Mcap0RecordBuilder.test.ts create mode 100644 typescript/src/v0/Mcap0RecordBuilder.ts delete mode 100644 typescript/src/v0/Mcap0RecordWriter.test.ts delete mode 100644 typescript/src/v0/Mcap0RecordWriter.ts diff --git a/typescript/scripts/bag2proto.ts b/typescript/scripts/bag2proto.ts index 636efd7faf..37849d81fd 100644 --- a/typescript/scripts/bag2proto.ts +++ b/typescript/scripts/bag2proto.ts @@ -16,6 +16,7 @@ import descriptor from "protobufjs/ext/descriptor"; import decompressLZ4 from "wasm-lz4"; import { Mcap0UnindexedWriter, IWritable, ChannelInfo, Message } from "../src/v0"; +import { Mcap0IndexedWriter } from "../src/v0/Mcap0IndexedWriter"; const builtinSrc = ` syntax = "proto3"; @@ -147,16 +148,23 @@ function convertTypedArrays(msg: Record): Record { - await this.handle.write(buffer); + const written = await this.handle.write(buffer); + this.totalBytesWritten += written.bytesWritten; + } + + position(): bigint { + return BigInt(this.totalBytesWritten); } } -async function convert(filePath: string) { +async function convert(filePath: string, options: { indexed: boolean }) { await 
decompressLZ4.isLoaded; const bzip2 = await Bzip2.init(); @@ -169,7 +177,13 @@ async function convert(filePath: string) { const fileHandle = await open(mcapFilePath, "w"); const fileHandleWritable = new FileHandleWritable(fileHandle); - const mcapFile = new Mcap0UnindexedWriter(fileHandleWritable); + let mcapFile: Mcap0UnindexedWriter | Mcap0IndexedWriter; + + if (options.indexed) { + mcapFile = new Mcap0IndexedWriter(fileHandleWritable); + } else { + mcapFile = new Mcap0UnindexedWriter(fileHandleWritable); + } await mcapFile.start({ profile: "", @@ -268,7 +282,7 @@ program .description("Convert a ROS1 .bag file to a mcap file with protobuf messages") .action(async (files: string[]) => { for (const file of files) { - await convert(file).catch(console.error); + await convert(file, { indexed: true }).catch(console.error); } }) .parse(); diff --git a/typescript/scripts/validate.ts b/typescript/scripts/validate.ts index d2e70cc153..91cf8e9129 100644 --- a/typescript/scripts/validate.ts +++ b/typescript/scripts/validate.ts @@ -246,11 +246,7 @@ async function validate( } break; } catch (error) { - log( - "Unable to read file as indexed; falling back to streaming:", - (error as Error).message, - error, - ); + log("Unable to read file as indexed; falling back to streaming:", error); } finally { await handle.close(); } diff --git a/typescript/src/common/IWritable.ts b/typescript/src/common/IWritable.ts index 6b2d656e8c..e7d86b2b28 100644 --- a/typescript/src/common/IWritable.ts +++ b/typescript/src/common/IWritable.ts @@ -2,5 +2,9 @@ * IWritable describes a writer interface. */ export interface IWritable { + // Write buffer to the output write(buffer: Uint8Array): Promise; + + // The current position in bytes from the start of the output + position(): bigint; } diff --git a/typescript/src/common/BufferedWriter.ts b/typescript/src/pre0/BufferBuilder.ts similarity index 50% rename from typescript/src/common/BufferedWriter.ts rename to typescript/src/pre0/BufferBuilder.ts index bee9b709f1..6cf20f4f6c 100644 --- a/typescript/src/common/BufferedWriter.ts +++ b/typescript/src/pre0/BufferBuilder.ts @@ -1,88 +1,88 @@ -import { IWritable } from "../v0"; - const LITTLE_ENDIAN = true; -export class BufferedWriter { - private buffer = new Uint8Array(4096); +export class BufferBuilder { + private fullBuffer = new Uint8Array(4096); private view: DataView; private textEncoder = new TextEncoder(); private offset = 0; constructor() { - this.view = new DataView(this.buffer.buffer); + this.view = new DataView(this.fullBuffer.buffer); } get length(): number { return this.offset; } - ensureCapacity(capacity: number): void { - if (this.offset + capacity >= this.buffer.byteLength) { - const newBuffer = new Uint8Array(this.buffer.byteLength * 2); - newBuffer.set(this.buffer); - - this.buffer = newBuffer; - this.view = new DataView(this.buffer.buffer); - } + get buffer(): Readonly { + return this.fullBuffer.slice(0, this.offset); } int8(value: number): void { - this.ensureCapacity(1); + this.ensureAdditionalCapacity(1); this.view.setInt8(this.offset, value); this.offset += 1; } uint8(value: number): void { - this.ensureCapacity(1); + this.ensureAdditionalCapacity(1); this.view.setUint8(this.offset, value); this.offset += 1; } int16(value: number): void { - this.ensureCapacity(2); + this.ensureAdditionalCapacity(2); this.view.setInt16(this.offset, value, LITTLE_ENDIAN); this.offset += 2; } uint16(value: number): void { - this.ensureCapacity(2); + this.ensureAdditionalCapacity(2); this.view.setUint16(this.offset, value, 
LITTLE_ENDIAN); this.offset += 2; } int32(value: number): void { - this.ensureCapacity(4); + this.ensureAdditionalCapacity(4); this.view.setInt32(this.offset, value, LITTLE_ENDIAN); this.offset += 4; } uint32(value: number): void { - this.ensureCapacity(4); + this.ensureAdditionalCapacity(4); this.view.setUint32(this.offset, value, LITTLE_ENDIAN); this.offset += 4; } int64(value: bigint): void { - this.ensureCapacity(8); + this.ensureAdditionalCapacity(8); this.view.setBigInt64(this.offset, value, LITTLE_ENDIAN); this.offset += 8; } uint64(value: bigint): void { - this.ensureCapacity(8); + this.ensureAdditionalCapacity(8); this.view.setBigUint64(this.offset, value, LITTLE_ENDIAN); this.offset += 8; } string(value: string): void { const stringBytes = this.textEncoder.encode(value); - this.ensureCapacity(stringBytes.byteLength + 4); + this.ensureAdditionalCapacity(stringBytes.byteLength + 4); this.uint32(value.length); - this.buffer.set(stringBytes, this.offset); + this.fullBuffer.set(stringBytes, this.offset); this.offset += stringBytes.length; } + bytes(buffer: Uint8Array): void { + this.ensureAdditionalCapacity(buffer.byteLength); + this.fullBuffer.set(buffer, this.offset); + this.offset += buffer.length; + } - async flush(writable: IWritable): Promise { - if (this.offset === 0) { - return; - } + reset(): void { + this.offset = 0; + } + + private ensureAdditionalCapacity(capacity: number): void { + if (this.offset + capacity >= this.fullBuffer.byteLength) { + const needCapacity = this.offset + capacity - this.fullBuffer.byteLength; + const newBuffer = new Uint8Array((this.fullBuffer.byteLength + needCapacity) * 2); + newBuffer.set(this.fullBuffer); - try { - await writable.write(this.buffer.slice(0, this.offset)); - } finally { - this.offset = 0; + this.fullBuffer = newBuffer; + this.view = new DataView(this.fullBuffer.buffer); } } } diff --git a/typescript/src/pre0/McapPre0Writer.ts b/typescript/src/pre0/McapPre0Writer.ts index 329c5fbf46..13bb92f062 100644 --- a/typescript/src/pre0/McapPre0Writer.ts +++ b/typescript/src/pre0/McapPre0Writer.ts @@ -1,6 +1,6 @@ import { open, FileHandle } from "fs/promises"; -import { BufferedWriter } from "../common/BufferedWriter"; +import { BufferBuilder } from "./BufferBuilder"; import { MCAP_MAGIC, RecordType } from "./constants"; import { ChannelInfo, McapRecord, Message } from "./types"; @@ -36,11 +36,12 @@ export default class McapPre0Writer { return; } // write the footer - const serializer = new BufferedWriter(); + const serializer = new BufferBuilder(); serializer.uint8(RecordType.FOOTER); serializer.uint64(0n); serializer.uint32(0); - await serializer.flush(this.writeStream); + + await this.writeStream.write(serializer.buffer); await this.writeStream?.close(); } @@ -49,35 +50,35 @@ export default class McapPre0Writer { if (!this.writeStream) { return; } - const serializer = new BufferedWriter(); + const serializer = new BufferBuilder(); serializer.uint32(info.id); serializer.string(info.topic); serializer.string(info.encoding); serializer.string(info.schemaName); serializer.string(info.schema); - const preamble = new BufferedWriter(); + const preamble = new BufferBuilder(); preamble.uint8(RecordType.CHANNEL_INFO); preamble.uint32(serializer.length); - await preamble.flush(this.writeStream); - await serializer.flush(this.writeStream); + await this.writeStream.write(preamble.buffer); + await this.writeStream.write(serializer.buffer); } private async writeMessageRecord(message: Message): Promise { if (!this.writeStream) { return; } - const 
serializer = new BufferedWriter(); + const serializer = new BufferBuilder(); serializer.uint32(message.channelInfo.id); serializer.uint64(message.timestamp); - const preamble = new BufferedWriter(); + const preamble = new BufferBuilder(); preamble.uint8(RecordType.MESSAGE); preamble.uint32(serializer.length + message.data.byteLength); - await preamble.flush(this.writeStream); - await serializer.flush(this.writeStream); + await this.writeStream.write(preamble.buffer); + await this.writeStream.write(serializer.buffer); await this.writeStream?.write(new Uint8Array(message.data)); } } diff --git a/typescript/src/v0/BufferBuilder.ts b/typescript/src/v0/BufferBuilder.ts new file mode 100644 index 0000000000..1809e1c9cc --- /dev/null +++ b/typescript/src/v0/BufferBuilder.ts @@ -0,0 +1,150 @@ +const LITTLE_ENDIAN = true; + +/** + * BufferBuilder provides methods to create a buffer from primitive values. The buffer grows as + * needed. + * + * Each method on buffer builder appends the value to the end of the buffer. + * + * A buffer can be reset to re-use the underlying memory and start writing at the start of the buffer. + */ +export class BufferBuilder { + private fullBuffer = new Uint8Array(4096); + private view: DataView; + private textEncoder = new TextEncoder(); + + // location of the write head - new writes will start here + private offset = 0; + + constructor() { + this.view = new DataView(this.fullBuffer.buffer); + } + + /** + * Length in bytes of the written buffer + */ + get length(): number { + return this.offset; + } + + get buffer(): Readonly { + return this.fullBuffer.slice(0, this.offset); + } + + int8(value: number): BufferBuilder { + this.ensureAdditionalCapacity(1); + this.view.setInt8(this.offset, value); + this.offset += 1; + return this; + } + uint8(value: number): BufferBuilder { + this.ensureAdditionalCapacity(1); + this.view.setUint8(this.offset, value); + this.offset += 1; + return this; + } + int16(value: number): BufferBuilder { + this.ensureAdditionalCapacity(2); + this.view.setInt16(this.offset, value, LITTLE_ENDIAN); + this.offset += 2; + return this; + } + uint16(value: number): BufferBuilder { + this.ensureAdditionalCapacity(2); + this.view.setUint16(this.offset, value, LITTLE_ENDIAN); + this.offset += 2; + return this; + } + int32(value: number): BufferBuilder { + this.ensureAdditionalCapacity(4); + this.view.setInt32(this.offset, value, LITTLE_ENDIAN); + this.offset += 4; + return this; + } + uint32(value: number): BufferBuilder { + this.ensureAdditionalCapacity(4); + this.view.setUint32(this.offset, value, LITTLE_ENDIAN); + this.offset += 4; + return this; + } + int64(value: bigint): BufferBuilder { + this.ensureAdditionalCapacity(8); + this.view.setBigInt64(this.offset, value, LITTLE_ENDIAN); + this.offset += 8; + return this; + } + uint64(value: bigint): BufferBuilder { + this.ensureAdditionalCapacity(8); + this.view.setBigUint64(this.offset, value, LITTLE_ENDIAN); + this.offset += 8; + return this; + } + string(value: string): BufferBuilder { + const stringBytes = this.textEncoder.encode(value); + this.ensureAdditionalCapacity(stringBytes.byteLength + 4); + this.uint32(stringBytes.length); + this.fullBuffer.set(stringBytes, this.offset); + this.offset += stringBytes.length; + return this; + } + bytes(buffer: Uint8Array): BufferBuilder { + this.ensureAdditionalCapacity(buffer.byteLength); + this.fullBuffer.set(buffer, this.offset); + this.offset += buffer.length; + return this; + } + array(array: [string, string][]): BufferBuilder { + // We placeholder the byte 
length of the array and will come back to + // set it once we have written the array items + const sizeOffset = this.offset; + this.uint32(0); // placeholder length of 0 + + for (const [key, value] of array) { + this.string(key).string(value); + } + const currentOffset = this.offset; + + // go back and write the actual byte length of the array + this.offset = sizeOffset; + const byteLength = currentOffset - sizeOffset - 4; + this.uint32(byteLength); + + // put the offset back to after the array items + this.offset = currentOffset; + return this; + } + + /** + * Move the write head to offset bytes from the start of the buffer. + * + * If the buffer is smaller than the new offset location, the buffer expands. + */ + seek(offset: number): BufferBuilder { + this.ensureCapacity(offset); + this.offset = offset; + return this; + } + + /** + * reset the write head to the start of the buffer + */ + reset(): BufferBuilder { + this.offset = 0; + return this; + } + + private ensureAdditionalCapacity(capacity: number): void { + this.ensureCapacity(this.offset + capacity); + } + + private ensureCapacity(capacity: number): void { + if (capacity > this.fullBuffer.byteLength) { + const newSize = Math.max(this.fullBuffer.byteLength * 1.5, capacity); + const newBuffer = new Uint8Array(newSize); + newBuffer.set(this.fullBuffer); + + this.fullBuffer = newBuffer; + this.view = new DataView(this.fullBuffer.buffer); + } + } +} diff --git a/typescript/src/v0/ChunkBuilder.ts b/typescript/src/v0/ChunkBuilder.ts new file mode 100644 index 0000000000..8fea3bcbb9 --- /dev/null +++ b/typescript/src/v0/ChunkBuilder.ts @@ -0,0 +1,64 @@ +import { Mcap0RecordBuilder } from "./Mcap0RecordBuilder"; +import { ChannelInfo, Message, MessageIndex } from "./types"; + +class ChunkBuilder { + private recordWriter = new Mcap0RecordBuilder(); + private messageIndices = new Map(); + private totalMessageCount = 0; + + startTime = 0n; + endTime = 0n; + + get numMessages(): number { + return this.totalMessageCount; + } + + get buffer(): Uint8Array { + return this.recordWriter.buffer; + } + + get indices(): IterableIterator { + return this.messageIndices.values(); + } + + addChannelInfo(info: ChannelInfo): void { + if (!this.messageIndices.has(info.channelId)) { + this.messageIndices.set(info.channelId, { + channelId: info.channelId, + count: 0, + records: [], + }); + } + this.recordWriter.writeChannelInfo(info); + } + + addMessage(message: Message): void { + if (this.startTime === 0n) { + this.startTime = message.recordTime; + } + this.endTime = message.recordTime; + + const messageIndex = this.messageIndices.get(message.channelId) ?? 
{ + channelId: message.channelId, + count: 0, + records: [], + }; + + this.messageIndices.set(message.channelId, messageIndex); + + messageIndex.count += 1; + messageIndex.records.push([message.recordTime, BigInt(this.recordWriter.length)]); + + this.totalMessageCount += 1; + this.recordWriter.writeMessage(message); + } + + reset(): void { + this.startTime = 0n; + this.endTime = 0n; + this.totalMessageCount = 0; + this.messageIndices.clear(); + } +} + +export { ChunkBuilder }; diff --git a/typescript/src/v0/Mcap0IndexedReader.test.ts b/typescript/src/v0/Mcap0IndexedReader.test.ts index 9553cdaf04..445cca9edb 100644 --- a/typescript/src/v0/Mcap0IndexedReader.test.ts +++ b/typescript/src/v0/Mcap0IndexedReader.test.ts @@ -346,7 +346,7 @@ describe("Mcap0IndexedReader", () => { Opcode.CHUNK_INDEX, crcSuffix([ ...uint64LE(0n), // start time - ...uint64LE(1n), // end time + ...uint64LE(2n), // end time ...uint64LE(0n), // offset ...keyValues(uint16LE, uint64LE, []), // message index offsets ...uint64LE(0n), // message index length @@ -359,7 +359,7 @@ describe("Mcap0IndexedReader", () => { Opcode.CHUNK_INDEX, crcSuffix([ ...uint64LE(1n), // start time - ...uint64LE(2n), // end time + ...uint64LE(3n), // end time ...uint64LE(0n), // offset ...keyValues(uint16LE, uint64LE, []), // message index offsets ...uint64LE(0n), // message index length diff --git a/typescript/src/v0/Mcap0IndexedReader.ts b/typescript/src/v0/Mcap0IndexedReader.ts index 5badf19636..8c9521e3ee 100644 --- a/typescript/src/v0/Mcap0IndexedReader.ts +++ b/typescript/src/v0/Mcap0IndexedReader.ts @@ -217,8 +217,14 @@ export default class Mcap0IndexedReader { ); for (let i = 0; i + 1 < relevantChunks.length; i++) { - if (relevantChunks[i]!.endTime >= relevantChunks[i + 1]!.startTime) { - throw new Error("Overlapping chunks are not currently supported"); + if (relevantChunks[i]!.endTime > relevantChunks[i + 1]!.startTime) { + throw new Error( + `Overlapping chunks are not currently supported; chunk at offset ${ + relevantChunks[i]!.chunkOffset + } ends at ${relevantChunks[i]!.endTime} and chunk at offset ${ + relevantChunks[i + 1]!.chunkOffset + } starts at ${relevantChunks[i + 1]!.startTime}`, + ); } } for (const chunkIndex of relevantChunks) { diff --git a/typescript/src/v0/Mcap0IndexedWriter.ts b/typescript/src/v0/Mcap0IndexedWriter.ts new file mode 100644 index 0000000000..8451159096 --- /dev/null +++ b/typescript/src/v0/Mcap0IndexedWriter.ts @@ -0,0 +1,146 @@ +import { IWritable } from "../common/IWritable"; +import { ChunkBuilder } from "./ChunkBuilder"; +import { Mcap0RecordBuilder } from "./Mcap0RecordBuilder"; +import { ChannelInfo, Message, Header, Attachment, Chunk, ChunkIndex } from "./types"; + +/** + * Mcap0IndexedWriter provides an interface for writing messages + * to indexed mcap files. + * + * NOTE: callers must wait on any method call to complete before calling another + * method. Calling a method before another has completed will result in a corrupt + * mcap file. 
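+ *
+ * A minimal usage sketch (any IWritable works, such as the FileHandleWritable
+ * from the bag2proto script above; the field values here are illustrative):
+ *
+ *   const writer = new Mcap0IndexedWriter(writable);
+ *   await writer.start({ profile: "", library: "example", metadata: [] });
+ *   const channelId = await writer.registerChannel({
+ *     topicName: "/topic",
+ *     encoding: "json",
+ *     schemaName: "Example",
+ *     schema: "",
+ *     userData: [],
+ *   });
+ *   await writer.addMessage({
+ *     channelId,
+ *     sequence: 0,
+ *     publishTime: 0n,
+ *     recordTime: 0n,
+ *     messageData: new Uint8Array(),
+ *   });
+ *   await writer.end();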
+ */ +export class Mcap0IndexedWriter { + private writable: IWritable; + private recordWriter = new Mcap0RecordBuilder(); + private channelInfos = new Map(); + private writtenChannelIds = new Set(); + private chunkIndices: ChunkIndex[] = []; + private chunkBuilder: ChunkBuilder = new ChunkBuilder(); + + constructor(writable: IWritable) { + this.writable = writable; + } + + async start(header: Header): Promise { + this.recordWriter.writeMagic(); + this.recordWriter.writeHeader(header); + + await this.writable.write(this.recordWriter.buffer); + this.recordWriter.reset(); + } + + async end(): Promise { + await this.finalizeChunk(); + + const position = this.writable.position(); + + for (const channelInfo of this.channelInfos.values()) { + this.recordWriter.writeChannelInfo(channelInfo); + } + + await this.writable.write(this.recordWriter.buffer); + this.recordWriter.reset(); + + for (const chunkIndex of this.chunkIndices) { + this.recordWriter.writeChunkIndex(chunkIndex); + } + + await this.writable.write(this.recordWriter.buffer); + this.recordWriter.reset(); + + this.recordWriter.writeFooter({ + indexOffset: position, + indexCrc: 0, + }); + + this.recordWriter.writeMagic(); + + await this.writable.write(this.recordWriter.buffer); + this.recordWriter.reset(); + } + + /** + * Add channel info and return a generated channel id. The channel id is used when adding messages. + */ + async registerChannel(info: Omit): Promise { + const channelId = this.channelInfos.size + 1; + this.channelInfos.set(channelId, { + ...info, + channelId, + }); + + return channelId; + } + + async addMessage(message: Message): Promise { + // write out channel id if we have not yet done so + if (!this.writtenChannelIds.has(message.channelId)) { + const channelInfo = this.channelInfos.get(message.channelId); + if (!channelInfo) { + throw new Error( + `Mcap0UnindexedWriter#addMessage failed: missing channel info for id ${message.channelId}`, + ); + } + + this.chunkBuilder.addChannelInfo(channelInfo); + this.writtenChannelIds.add(message.channelId); + } + + this.chunkBuilder.addMessage(message); + + if (this.chunkBuilder.numMessages > 10) { + await this.finalizeChunk(); + } + } + + async addAttachment(attachment: Attachment): Promise { + this.recordWriter.writeAttachment(attachment); + + await this.writable.write(this.recordWriter.buffer); + this.recordWriter.reset(); + } + + private async finalizeChunk(): Promise { + if (this.chunkBuilder.numMessages === 0) { + return; + } + + const chunkData = this.chunkBuilder.buffer; + const chunkRecord: Chunk = { + uncompressedSize: BigInt(chunkData.length), + uncompressedCrc: 0, + compression: "", + records: chunkData, + }; + + const offset = this.writable.position(); + const chunkIndex: ChunkIndex = { + startTime: this.chunkBuilder.startTime, + endTime: this.chunkBuilder.endTime, + chunkOffset: offset, + messageIndexOffsets: new Map(), + messageIndexLength: 0n, + compression: chunkRecord.compression, + compressedSize: 0n, + uncompressedSize: chunkRecord.uncompressedSize, + }; + + this.recordWriter.writeChunk(chunkRecord); + + const startPosition = this.writable.position(); + for (const messageIndex of this.chunkBuilder.indices) { + chunkIndex.messageIndexOffsets.set(messageIndex.channelId, this.writable.position()); + this.recordWriter.writeMessageIndex(messageIndex); + } + + chunkIndex.messageIndexLength = this.writable.position() - startPosition; + + this.chunkIndices.push(chunkIndex); + this.chunkBuilder.reset(); + + await this.writable.write(this.recordWriter.buffer); + 
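+    // The chunk record and its trailing message index records were serialized
+    // into recordWriter above; flush them to the writable in a single write.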
+    this.recordWriter.reset();
+  }
+}
diff --git a/typescript/src/v0/Mcap0RecordBuilder.test.ts b/typescript/src/v0/Mcap0RecordBuilder.test.ts
new file mode 100644
index 0000000000..afe7dcb828
--- /dev/null
+++ b/typescript/src/v0/Mcap0RecordBuilder.test.ts
@@ -0,0 +1,106 @@
+import { BufferBuilder } from "./BufferBuilder";
+import { Mcap0RecordBuilder } from "./Mcap0RecordBuilder";
+
+describe("Mcap0BufferRecordBuilder", () => {
+  it("writes magic", async () => {
+    const writer = new Mcap0RecordBuilder();
+
+    writer.writeMagic();
+    expect(writer.buffer).toEqual(new Uint8Array([137, 77, 67, 65, 80, 48, 13, 10]));
+  });
+
+  it("writes header", async () => {
+    const writer = new Mcap0RecordBuilder();
+
+    writer.writeHeader({
+      profile: "foo",
+      library: "bar",
+      metadata: [["something", "magical"]],
+    });
+
+    const byteLength = 42;
+    const buffer = new BufferBuilder();
+    buffer
+      .uint8(1) // opcode
+      .uint64(BigInt(byteLength)) // record byte length
+      .string("foo")
+      .string("bar")
+      .uint32(24) // metadata byte length
+      .string("something")
+      .string("magical");
+
+    expect(buffer.length).toEqual(byteLength + 9);
+    expect(writer.buffer).toEqual(buffer.buffer);
+  });
+
+  it("writes footer", async () => {
+    const writer = new Mcap0RecordBuilder();
+
+    writer.writeFooter({
+      indexOffset: 0n,
+      indexCrc: 0,
+    });
+
+    const buffer = new BufferBuilder();
+    buffer
+      .uint8(2) // opcode
+      .uint64(BigInt(12)) // record byte length
+      .uint64(0n)
+      .uint32(0);
+
+    expect(buffer.length).toEqual(12 + 9);
+    expect(writer.buffer).toEqual(buffer.buffer);
+  });
+
+  it("writes channel info", async () => {
+    const writer = new Mcap0RecordBuilder();
+
+    writer.writeChannelInfo({
+      channelId: 1,
+      topicName: "/topic",
+      encoding: "encoding",
+      schemaName: "schema name",
+      schema: "schema",
+      userData: [],
+    });
+
+    const buffer = new BufferBuilder();
+    buffer
+      .uint8(3) // opcode
+      .uint64(BigInt(57)) // record byte length
+      .uint16(1)
+      .string("/topic")
+      .string("encoding")
+      .string("schema name")
+      .string("schema")
+      .uint32(0) // user data length
+      .uint32(0); // crc
+
+    expect(buffer.length).toEqual(57 + 9);
+    expect(writer.buffer).toEqual(buffer.buffer);
+  });
+
+  it("writes messages", async () => {
+    const writer = new Mcap0RecordBuilder();
+
+    writer.writeMessage({
+      channelId: 1,
+      publishTime: 3n,
+      recordTime: 5n,
+      sequence: 7,
+      messageData: new Uint8Array(),
+    });
+
+    const buffer = new BufferBuilder();
+    buffer
+      .uint8(4) // opcode
+      .uint64(BigInt(22)) // record byte length
+      .uint16(1)
+      .uint32(7)
+      .uint64(3n)
+      .uint64(5n);
+
+    expect(buffer.length).toEqual(22 + 9);
+    expect(writer.buffer).toEqual(buffer.buffer);
+  });
+});
diff --git a/typescript/src/v0/Mcap0RecordBuilder.ts b/typescript/src/v0/Mcap0RecordBuilder.ts
new file mode 100644
index 0000000000..958070f033
--- /dev/null
+++ b/typescript/src/v0/Mcap0RecordBuilder.ts
@@ -0,0 +1,182 @@
+import { BufferBuilder } from "./BufferBuilder";
+import { MCAP0_MAGIC, Opcode } from "./constants";
+import {
+  ChannelInfo,
+  Header,
+  Footer,
+  Message,
+  Attachment,
+  Chunk,
+  ChunkIndex,
+  MessageIndex,
+} from "./types";
+
+/**
+ * Mcap0RecordBuilder provides methods to serialize mcap records to a buffer in memory.
+ *
+ * It makes no effort to ensure spec compatibility on the order of records; this is the
+ * responsibility of the caller.
+ *
+ * You'll likely want to use one of the higher level writer interfaces unless you are building your
+ * own higher level writing interface.
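+ *
+ * A small sketch of direct use, mirroring the test above:
+ *
+ *   const builder = new Mcap0RecordBuilder();
+ *   builder.writeMagic();
+ *   builder.writeHeader({ profile: "", library: "example", metadata: [] });
+ *   const bytes = builder.buffer; // serialized records, ready for an IWritable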
+ */
+export class Mcap0RecordBuilder {
+  private bufferBuilder = new BufferBuilder();
+
+  get length(): number {
+    return this.bufferBuilder.length;
+  }
+
+  get buffer(): Uint8Array {
+    return this.bufferBuilder.buffer;
+  }
+
+  reset(): void {
+    this.bufferBuilder.reset();
+  }
+
+  writeMagic(): void {
+    this.bufferBuilder.bytes(new Uint8Array(MCAP0_MAGIC));
+  }
+
+  writeHeader(header: Header): void {
+    this.bufferBuilder.uint8(Opcode.HEADER);
+
+    const startPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .uint64(0n) // placeholder size
+      .string(header.profile)
+      .string(header.library)
+      .array(header.metadata);
+
+    const endPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .seek(startPosition)
+      .uint64(BigInt(endPosition - startPosition - 8))
+      .seek(endPosition);
+  }
+
+  writeFooter(footer: Footer): void {
+    this.bufferBuilder
+      .uint8(Opcode.FOOTER)
+      .uint64(12n) // footer is fixed length
+      .uint64(footer.indexOffset)
+      .uint32(footer.indexCrc);
+  }
+
+  writeChannelInfo(info: ChannelInfo): void {
+    this.bufferBuilder.uint8(Opcode.CHANNEL_INFO);
+
+    const startPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .uint64(0n) // placeholder
+      .uint16(info.channelId)
+      .string(info.topicName)
+      .string(info.encoding)
+      .string(info.schemaName)
+      .string(info.schema)
+      .array(info.userData)
+      .uint32(0); // crc
+
+    const endPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .seek(startPosition)
+      .uint64(BigInt(endPosition - startPosition - 8))
+      .seek(endPosition);
+  }
+
+  writeMessage(message: Message): void {
+    this.bufferBuilder
+      .uint8(Opcode.MESSAGE)
+      .uint64(BigInt(2 + 4 + 8 + 8 + message.messageData.byteLength))
+      .uint16(message.channelId)
+      .uint32(message.sequence)
+      .uint64(message.publishTime)
+      .uint64(message.recordTime)
+      .bytes(message.messageData);
+  }
+
+  writeAttachment(attachment: Attachment): void {
+    this.bufferBuilder.uint8(Opcode.ATTACHMENT);
+
+    const startPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .uint64(0n) // placeholder
+      .string(attachment.name)
+      .uint64(attachment.recordTime)
+      .string(attachment.contentType)
+      .bytes(attachment.data);
+
+    const endPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .seek(startPosition)
+      .uint64(BigInt(endPosition - startPosition - 8))
+      .seek(endPosition);
+  }
+
+  writeChunk(chunk: Chunk): void {
+    this.bufferBuilder.uint8(Opcode.CHUNK);
+
+    const startPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .uint64(0n) // placeholder
+      .uint64(chunk.uncompressedSize)
+      .uint32(chunk.uncompressedCrc)
+      .string(chunk.compression)
+      .bytes(chunk.records);
+
+    const endPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .seek(startPosition)
+      .uint64(BigInt(endPosition - startPosition - 8))
+      .seek(endPosition);
+  }
+
+  writeChunkIndex(chunkIndex: ChunkIndex): void {
+    this.bufferBuilder.uint8(Opcode.CHUNK_INDEX);
+
+    const startPosition = this.bufferBuilder.length;
+    this.bufferBuilder
+      .uint64(0n) // placeholder
+      .uint64(chunkIndex.startTime)
+      .uint64(chunkIndex.endTime)
+      .uint64(chunkIndex.chunkOffset)
+      .uint32(chunkIndex.messageIndexOffsets.size * 10);
+
+    for (const [channelId, offset] of chunkIndex.messageIndexOffsets) {
+      this.bufferBuilder.uint16(channelId).uint64(offset);
+    }
+
+    this.bufferBuilder
+      .uint64(chunkIndex.messageIndexLength)
+      .string(chunkIndex.compression)
+      .uint64(chunkIndex.compressedSize)
+      .uint64(chunkIndex.uncompressedSize)
+      .uint32(0);
+
+    const endPosition = this.bufferBuilder.length;
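+    // The record byte length was not known when the opcode was written, so a
+    // zero placeholder was emitted above; now that endPosition is known, seek
+    // back, backpatch the real length, and restore the write head.
+    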
this.bufferBuilder + .seek(startPosition) + .uint64(BigInt(endPosition - startPosition - 8)) + .seek(endPosition); + } + + writeMessageIndex(messageIndex: MessageIndex): void { + this.bufferBuilder.uint8(Opcode.MESSAGE_INDEX); + + const messageIndexRecordsByteLength = messageIndex.records.length * 16; + + this.bufferBuilder + .uint64(BigInt(2 + 4 + 4 + messageIndexRecordsByteLength + 1)) + .uint16(messageIndex.channelId) + .uint32(messageIndex.count) + .uint32(messageIndexRecordsByteLength); + + for (const record of messageIndex.records) { + this.bufferBuilder.uint64(record[0]).uint64(record[1]); + } + + // crc + this.bufferBuilder.uint32(0); + } +} diff --git a/typescript/src/v0/Mcap0RecordWriter.test.ts b/typescript/src/v0/Mcap0RecordWriter.test.ts deleted file mode 100644 index 9c50af54d6..0000000000 --- a/typescript/src/v0/Mcap0RecordWriter.test.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { IWritable } from "."; -import { Mcap0RecordWriter } from "./Mcap0RecordWriter"; - -class MemoryWritable implements IWritable { - private fullBuffer: Uint8Array; - private offset = 0; - - get length() { - return this.offset; - } - - get buffer(): Readonly { - return this.fullBuffer.slice(0, this.offset); - } - - constructor() { - this.fullBuffer = new Uint8Array(4096); - } - - async write(buffer: Uint8Array): Promise { - this.fullBuffer.set(buffer, this.offset); - this.offset += buffer.length; - } -} - -describe("Mcap0RecordWriter", () => { - it("writes magic", async () => { - const memoryWritable = new MemoryWritable(); - const writer = new Mcap0RecordWriter(memoryWritable); - - await writer.writeMagic(); - expect(memoryWritable.buffer).toEqual(new Uint8Array([137, 77, 67, 65, 80, 48, 13, 10])); - }); - - it("writes header", async () => { - const memoryWritable = new MemoryWritable(); - const writer = new Mcap0RecordWriter(memoryWritable); - - await writer.writeHeader({ - profile: "foo", - library: "bar", - metadata: [["something", "magical"]], - }); - expect(memoryWritable.buffer).toEqual( - new Uint8Array([ - 1, 42, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 102, 111, 111, 3, 0, 0, 0, 98, 97, 114, 24, 0, 0, 0, - 9, 0, 0, 0, 115, 111, 109, 101, 116, 104, 105, 110, 103, 7, 0, 0, 0, 109, 97, 103, 105, 99, - 97, 108, - ]), - ); - }); - - it("writes footer", async () => { - const memoryWritable = new MemoryWritable(); - const writer = new Mcap0RecordWriter(memoryWritable); - - await writer.writeFooter({ - indexOffset: 0n, - indexCrc: 0, - }); - expect(memoryWritable.buffer).toEqual( - new Uint8Array([2, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), - ); - }); - - it("writes channel info", async () => { - const memoryWritable = new MemoryWritable(); - const writer = new Mcap0RecordWriter(memoryWritable); - - await writer.writeChannelInfo({ - channelId: 1, - topicName: "topic", - encoding: "enc", - schemaName: "foo", - schema: "bar", - userData: [], - }); - expect(memoryWritable.buffer).toEqual( - new Uint8Array([ - 3, 40, 0, 0, 0, 0, 0, 0, 0, 1, 0, 5, 0, 0, 0, 116, 111, 112, 105, 99, 3, 0, 0, 0, 101, 110, - 99, 3, 0, 0, 0, 102, 111, 111, 3, 0, 0, 0, 98, 97, 114, 0, 0, 0, 0, 0, 0, 0, 0, - ]), - ); - }); - - it("writes messages", async () => { - const memoryWritable = new MemoryWritable(); - const writer = new Mcap0RecordWriter(memoryWritable); - - await writer.writeMessage({ - channelId: 1, - publishTime: 3n, - recordTime: 5n, - sequence: 7, - messageData: new Uint8Array(), - }); - expect(memoryWritable.buffer).toEqual( - new Uint8Array([ - 4, 22, 0, 0, 0, 0, 0, 0, 0, 1, 0, 7, 0, 0, 0, 3, 0, 0, 0, 
0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, - 0, - ]), - ); - }); -}); diff --git a/typescript/src/v0/Mcap0RecordWriter.ts b/typescript/src/v0/Mcap0RecordWriter.ts deleted file mode 100644 index 49e2cdc46b..0000000000 --- a/typescript/src/v0/Mcap0RecordWriter.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { BufferedWriter } from "../common/BufferedWriter"; -import { IWritable } from "../common/IWritable"; -import { MCAP0_MAGIC, Opcode } from "./constants"; -import { ChannelInfo, Header, Footer, Message, Attachment } from "./types"; - -/** - * Mcap0RecordWriter provides methods to serialize mcap records to an IWritable. - * - * It makes no effort to ensure spec compatability on the order of records, this is the responsibility - * of the caller. - * - * Unless you are building your own higher level writer interface, you'll likely want to use one of - * the higher level writer interfaces. - */ -export class Mcap0RecordWriter { - private recordPrefixWriter: BufferedWriter; - private bufferedWriter: BufferedWriter; - private writable: IWritable; - - constructor(writable: IWritable) { - this.recordPrefixWriter = new BufferedWriter(); - this.bufferedWriter = new BufferedWriter(); - this.writable = writable; - } - - async writeMagic(): Promise { - await this.writable.write(new Uint8Array(MCAP0_MAGIC)); - } - - async writeHeader(header: Header): Promise { - this.bufferedWriter.string(header.profile); - this.bufferedWriter.string(header.library); - - const keyValueWriter = new BufferedWriter(); - for (const item of header.metadata) { - const [key, value] = item; - keyValueWriter.string(key); - keyValueWriter.string(value); - } - - this.bufferedWriter.uint32(keyValueWriter.length); - - this.recordPrefixWriter.uint8(Opcode.HEADER); - this.recordPrefixWriter.uint64(BigInt(this.bufferedWriter.length + keyValueWriter.length)); - - await this.recordPrefixWriter.flush(this.writable); - await this.bufferedWriter.flush(this.writable); - await keyValueWriter.flush(this.writable); - } - - async writeFooter(footer: Footer): Promise { - this.recordPrefixWriter.uint8(Opcode.FOOTER); - this.recordPrefixWriter.uint64(12n); // footer is fixed length - this.recordPrefixWriter.uint64(footer.indexOffset); - this.recordPrefixWriter.uint32(footer.indexCrc); - - await this.recordPrefixWriter.flush(this.writable); - } - - async writeChannelInfo(info: ChannelInfo): Promise { - this.bufferedWriter.uint16(info.channelId); - this.bufferedWriter.string(info.topicName); - this.bufferedWriter.string(info.encoding); - this.bufferedWriter.string(info.schemaName); - this.bufferedWriter.string(info.schema); - - const keyValueWriter = new BufferedWriter(); - for (const item of info.userData) { - const [key, value] = item; - keyValueWriter.string(key); - keyValueWriter.string(value); - } - - this.bufferedWriter.uint32(keyValueWriter.length); - - // Add crc to keyValueWriter after adding the length of key/values to the bufferWriter - // This allows the crc to serialize our with the keyValueWriter - keyValueWriter.uint32(0); - - this.recordPrefixWriter.uint8(Opcode.CHANNEL_INFO); - this.recordPrefixWriter.uint64(BigInt(this.bufferedWriter.length + keyValueWriter.length)); - - await this.recordPrefixWriter.flush(this.writable); - await this.bufferedWriter.flush(this.writable); - await keyValueWriter.flush(this.writable); - } - - async writeMessage(message: Message): Promise { - this.bufferedWriter.uint16(message.channelId); - this.bufferedWriter.uint32(message.sequence); - this.bufferedWriter.uint64(message.publishTime); - 
this.bufferedWriter.uint64(message.recordTime); - - this.recordPrefixWriter.uint8(Opcode.MESSAGE); - this.recordPrefixWriter.uint64( - BigInt(this.bufferedWriter.length + message.messageData.byteLength), - ); - - await this.recordPrefixWriter.flush(this.writable); - await this.bufferedWriter.flush(this.writable); - await this.writable.write(message.messageData); - } - - async writeAttachment(attachment: Attachment): Promise { - this.bufferedWriter.string(attachment.name); - this.bufferedWriter.uint64(attachment.recordTime); - this.bufferedWriter.string(attachment.contentType); - - this.recordPrefixWriter.uint8(Opcode.CHANNEL_INFO); - this.recordPrefixWriter.uint64(BigInt(this.bufferedWriter.length + attachment.data.byteLength)); - - await this.recordPrefixWriter.flush(this.writable); - await this.bufferedWriter.flush(this.writable); - await this.writable.write(attachment.data); - } -} diff --git a/typescript/src/v0/Mcap0UnindexedWriter.ts b/typescript/src/v0/Mcap0UnindexedWriter.ts index 6fdd82fa4c..25008c269d 100644 --- a/typescript/src/v0/Mcap0UnindexedWriter.ts +++ b/typescript/src/v0/Mcap0UnindexedWriter.ts @@ -1,61 +1,69 @@ import { IWritable } from "../common/IWritable"; -import { Mcap0RecordWriter } from "./Mcap0RecordWriter"; +import { Mcap0RecordBuilder } from "./Mcap0RecordBuilder"; import { ChannelInfo, Message, Header, Attachment } from "./types"; +/** + * Mcap0UnindexedWriter provides an interface for writing messages + * to unindexed mcap files. + * + * NOTE: callers must wait on any method call to complete before calling another + * method. Calling a method before another has completed will result in a corrupt + * mcap file. + */ export class Mcap0UnindexedWriter { - private recordWriter: Mcap0RecordWriter; + private bufferRecordBuilder: Mcap0RecordBuilder; + private writable: IWritable; - private channelInfos = new Map(); - private writtenChannelIds = new Set(); + private nextChannelId = 1; constructor(writable: IWritable) { - this.recordWriter = new Mcap0RecordWriter(writable); + this.writable = writable; + this.bufferRecordBuilder = new Mcap0RecordBuilder(); } async start(header: Header): Promise { - await this.recordWriter.writeMagic(); - await this.recordWriter.writeHeader(header); + this.bufferRecordBuilder.writeMagic(); + this.bufferRecordBuilder.writeHeader(header); + + await this.writable.write(this.bufferRecordBuilder.buffer); + this.bufferRecordBuilder.reset(); } async end(): Promise { - await this.recordWriter.writeFooter({ + this.bufferRecordBuilder.writeFooter({ indexOffset: 0n, indexCrc: 0, }); - await this.recordWriter.writeMagic(); + await this.writable.write(this.bufferRecordBuilder.buffer); + this.bufferRecordBuilder.reset(); } /** * Add channel info and return a generated channel id. The channel id is used when adding messages. 
*/
   async registerChannel(info: Omit<ChannelInfo, "channelId">): Promise<number> {
-    const channelId = this.channelInfos.size + 1;
-    this.channelInfos.set(channelId, {
+    const channelId = this.nextChannelId;
+    this.bufferRecordBuilder.writeChannelInfo({
       ...info,
       channelId,
     });
 
+    await this.writable.write(this.bufferRecordBuilder.buffer);
+    this.bufferRecordBuilder.reset();
+
+    this.nextChannelId += 1;
     return channelId;
   }
 
   async addMessage(message: Message): Promise<void> {
-    // write out channel id if we have not yet done so
-    if (!this.writtenChannelIds.has(message.channelId)) {
-      const channelInfo = this.channelInfos.get(message.channelId);
-      if (!channelInfo) {
-        throw new Error(
-          `Mcap0UnindexedWriter#addMessage failed: missing channel info for id ${message.channelId}`,
-        );
-      }
-
-      await this.recordWriter.writeChannelInfo(channelInfo);
-      this.writtenChannelIds.add(message.channelId);
-    }
-
-    await this.recordWriter.writeMessage(message);
+    this.bufferRecordBuilder.writeMessage(message);
+    await this.writable.write(this.bufferRecordBuilder.buffer);
+    this.bufferRecordBuilder.reset();
   }
 
   async addAttachment(attachment: Attachment): Promise<void> {
-    await this.recordWriter.writeAttachment(attachment);
+    this.bufferRecordBuilder.writeAttachment(attachment);
+    await this.writable.write(this.bufferRecordBuilder.buffer);
+    this.bufferRecordBuilder.reset();
   }
 }
diff --git a/typescript/src/v0/index.ts b/typescript/src/v0/index.ts
index c5305a0e24..35c9e1873a 100644
--- a/typescript/src/v0/index.ts
+++ b/typescript/src/v0/index.ts
@@ -1,4 +1,5 @@
 export * from "../common/IWritable";
 export * from "./types";
-export * from "./Mcap0RecordWriter";
+export * from "./Mcap0RecordBuilder";
 export * from "./Mcap0UnindexedWriter";
+export * from "./Mcap0IndexedWriter";
From 5202e6e1d89278b6022d4746719008779c5315cd Mon Sep 17 00:00:00 2001
From: John Hurliman
Date: Sat, 22 Jan 2022 14:19:25 -0800
Subject: [PATCH 027/635] [C++] Unified writer for chunked and unchunked files (#50)

* Replace std::error_code with mcap::Status, IWritable interface

* [C++] Unified writer for chunked and unchunked files

* Benchmarking, `make build-host`
---
 cpp/Makefile                     |  13 +
 cpp/bench/CMakeLists.txt         |   8 +
 cpp/bench/conanfile.py           |  13 +
 cpp/bench/run.cpp                | 214 ++++++++++++++++
 cpp/dev.Dockerfile               |  27 ++-
 cpp/docker-compose.yml           |   6 +
 cpp/examples/bag2mcap.cpp        |   5 +-
 cpp/mcap/include/mcap/errors.hpp |  73 ++----
 cpp/mcap/include/mcap/mcap.hpp   | 139 +++++++++--
 cpp/mcap/include/mcap/mcap.inl   | 405 +++++++++++++++++++++++++------
 10 files changed, 742 insertions(+), 161 deletions(-)
 create mode 100644 cpp/bench/CMakeLists.txt
 create mode 100644 cpp/bench/conanfile.py
 create mode 100644 cpp/bench/run.cpp

diff --git a/cpp/Makefile b/cpp/Makefile
index 2bc09f837a..482bada9ae 100644
--- a/cpp/Makefile
+++ b/cpp/Makefile
@@ -4,6 +4,15 @@ default: build
 build:
 	docker compose build
 
+.PHONY: build-host
+build-host:
+	conan install bench --install-folder bench/build/Release \
+		-s compiler.cppstd=17 -s build_type=Release --build missing
+	conan install examples --install-folder examples/build/Release \
+		-s compiler.cppstd=17 -s build_type=Release --build missing
+	conan build examples --build-folder examples/build/Release
+	conan build bench --build-folder bench/build/Release
+
 .PHONY: format-check
 format-check:
 	docker compose run --rm -v .:/src base python3 scripts/format.py /src
@@ -12,6 +21,10 @@ format-check:
 format-fix:
 	docker compose run --rm -v .:/src base python3 scripts/format.py --fix /src
 
+.PHONY: bench
+bench:
+	docker compose run bench
+
 .PHONY: bag2mcap
 bag2mcap:
 	docker
compose run bag2mcap diff --git a/cpp/bench/CMakeLists.txt b/cpp/bench/CMakeLists.txt new file mode 100644 index 0000000000..fffb153bf8 --- /dev/null +++ b/cpp/bench/CMakeLists.txt @@ -0,0 +1,8 @@ +cmake_minimum_required(VERSION 3.1) +project(McapBenchmarks CXX) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) +conan_basic_setup() + +add_executable(bench-tests run.cpp) +target_link_libraries(bench-tests ${CONAN_LIBS}) diff --git a/cpp/bench/conanfile.py b/cpp/bench/conanfile.py new file mode 100644 index 0000000000..a8fa25a3f5 --- /dev/null +++ b/cpp/bench/conanfile.py @@ -0,0 +1,13 @@ +from conans import ConanFile, CMake + + +class McapBenchmarksConan(ConanFile): + settings = "os", "compiler", "build_type", "arch" + generators = "cmake" + requires = "mcap/0.0.1" + build_requires = "benchmark/1.6.0" + + def build(self): + cmake = CMake(self) + cmake.configure() + cmake.build() diff --git a/cpp/bench/run.cpp b/cpp/bench/run.cpp new file mode 100644 index 0000000000..9a04504389 --- /dev/null +++ b/cpp/bench/run.cpp @@ -0,0 +1,214 @@ +#include + +#include + +#include +#include +#include +#include + +constexpr char StringSchema[] = "string data"; + +static void BM_McapWriterBufferedWriterUnchunked(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create an unchunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunked = false; + + // Open an output memory buffer and write the file header + mcap::BufferedWriter out{}; + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + const auto iterations = size_t(state.range(0)); + + while (state.KeepRunning()) { + for (size_t i = 0; i < iterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file to memory + writer.close(); +} + +static void BM_McapWriterBufferedWriterChunked(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create a chunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunked = true; + options.chunkSize = uint64_t(state.range(1)); + + // Open an output memory buffer and write the file header + mcap::BufferedWriter out{}; + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + const auto iterations = size_t(state.range(0)); + + while (state.KeepRunning()) { + for (size_t i = 0; i < iterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file to memory + writer.close(); +} + +static void 
BM_McapWriterStreamWriterUnchunked(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create an unchunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunked = false; + + // Open an output file stream and write the file header + std::ofstream out("benchmark.mcap", std::ios::binary); + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + const auto iterations = size_t(state.range(0)); + + while (state.KeepRunning()) { + for (size_t i = 0; i < iterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file and delete it + writer.close(); + std::remove("benchmark.mcap"); +} + +static void BM_McapWriterStreamWriterChunked(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create a chunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunked = true; + options.chunkSize = uint64_t(state.range(1)); + + // Open an output file stream and write the file header + std::ofstream out("benchmark.mcap", std::ios::binary); + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + const auto iterations = size_t(state.range(0)); + + while (state.KeepRunning()) { + for (size_t i = 0; i < iterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file and delete it + writer.close(); + std::remove("benchmark.mcap"); +} + +int main(int argc, char* argv[]) { + benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterUnchunked", + BM_McapWriterBufferedWriterUnchunked) + ->Arg(10000); + benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterChunked", + BM_McapWriterBufferedWriterChunked) + ->Args({10000, 1}) + ->Args({10000, 10}) + ->Args({10000, 100}) + ->Args({10000, 1000}) + ->Args({10000, 10000}) + ->Args({10000, 100000}) + ->Args({10000, 1000000}) + ->Args({10000, 10000000}); + benchmark::RegisterBenchmark("BM_McapWriterStreamWriterUnchunked", + BM_McapWriterStreamWriterUnchunked) + ->Arg(10000); + benchmark::RegisterBenchmark("BM_McapWriterStreamWriterChunked", BM_McapWriterStreamWriterChunked) + ->Args({10000, 1}) + ->Args({10000, 10}) + ->Args({10000, 100}) + ->Args({10000, 1000}) + ->Args({10000, 10000}) + ->Args({10000, 100000}) + ->Args({10000, 1000000}) + ->Args({10000, 10000000}); + benchmark::Initialize(&argc, argv); + benchmark::RunSpecifiedBenchmarks(); + + return 0; +} diff --git a/cpp/dev.Dockerfile b/cpp/dev.Dockerfile index e40b90de63..34047deae9 100644 --- 
a/cpp/dev.Dockerfile +++ b/cpp/dev.Dockerfile @@ -36,16 +36,25 @@ RUN pip --no-cache-dir install conan ENV CONAN_V2_MODE=1 RUN conan config init -FROM build as build_bag2mcap +FROM build as build_executables +COPY ./bench /src/bench/ COPY ./examples /src/examples/ COPY ./mcap /src/mcap/ COPY ./.clang-format /src/ RUN conan editable add ./mcap mcap/0.0.1 -RUN conan install examples --install-folder examples/build \ - -s compiler.cppstd=17 --build=zlib --build=zstd - -FROM build_bag2mcap AS bag2mcap -COPY --from=build_bag2mcap /src /src -COPY --from=build_bag2mcap /src/examples/build/ /src/examples/build/ -RUN conan build examples --build-folder examples/build -ENTRYPOINT ["examples/build/bin/bag2mcap"] +RUN conan install bench --install-folder bench/build/Release \ + -s compiler.cppstd=17 -s build_type=Release --build missing +RUN conan install examples --install-folder examples/build/Release \ + -s compiler.cppstd=17 -s build_type=Release --build missing + +FROM build_executables AS bag2mcap +COPY --from=build_executables /src /src +COPY --from=build_executables /src/examples/build/ /src/examples/build/ +RUN conan build examples --build-folder examples/build/Release +ENTRYPOINT ["examples/build/Release/bin/bag2mcap"] + +FROM build_executables AS bench +COPY --from=build_executables /src /src +COPY --from=build_executables /src/bench/build/ /src/bench/build/ +RUN conan build bench --build-folder bench/build/Release +ENTRYPOINT ["bench/build/Release/bin/bench-tests"] diff --git a/cpp/docker-compose.yml b/cpp/docker-compose.yml index c93f493d5e..66bf105d8c 100644 --- a/cpp/docker-compose.yml +++ b/cpp/docker-compose.yml @@ -11,6 +11,12 @@ services: dockerfile: dev.Dockerfile target: build + bench: + build: + context: . + dockerfile: dev.Dockerfile + target: bench + bag2mcap: build: context: . 
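
The benchmark above and the example change below drive the same unified writer. A condensed sketch of the chunked write path follows; the output file name, schema string, and error handling are illustrative, and it only uses types this patch defines (McapWriterOptions, ChannelInfo, Message, Status):

```cpp
#include <mcap/mcap.hpp>

#include <array>
#include <cstdint>
#include <cstring>
#include <fstream>

int main() {
  // One writer handles both modes; chunked is the default in this patch
  mcap::McapWriter writer;
  auto options = mcap::McapWriterOptions("ros1");
  options.chunked = true;

  std::ofstream out("output.mcap", std::ios::binary);
  writer.open(out, options);

  mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", "string data");
  writer.addChannel(topic);  // assigns topic.channelId

  // Length-prefixed "Hello, world!" payload, as in the benchmark above
  std::array<std::byte, 4 + 13> payload;
  const uint32_t length = 13;
  std::memcpy(payload.data(), &length, 4);
  std::memcpy(payload.data() + 4, "Hello, world!", 13);

  mcap::Message msg;
  msg.channelId = topic.channelId;
  msg.sequence = 0;
  msg.publishTime = 0;
  msg.recordTime = msg.publishTime;
  msg.data = payload.data();
  msg.dataSize = payload.size();

  // write() now returns mcap::Status instead of std::error_code
  const mcap::Status status = writer.write(msg);
  if (!status.ok()) {
    return 1;  // status.message carries a human-readable reason
  }

  // Flushes any open chunk, then indexes, statistics, footer, and magic
  writer.close();
  return 0;
}
```

Setting `options.chunked = false` emits the same records without Chunk framing, which is the trade-off the benchmarks above measure across chunk sizes.
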
diff --git a/cpp/examples/bag2mcap.cpp b/cpp/examples/bag2mcap.cpp index fac79adbbe..07ac3a053e 100644 --- a/cpp/examples/bag2mcap.cpp +++ b/cpp/examples/bag2mcap.cpp @@ -16,8 +16,11 @@ mcap::Timestamp now() { int main() { mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunked = true; // set this to false to write an unchunked file + std::ofstream out("output.mcap", std::ios::binary); - writer.open(out, mcap::McapWriterOptions("ros1")); + writer.open(out, options); mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); writer.addChannel(topic); diff --git a/cpp/mcap/include/mcap/errors.hpp b/cpp/mcap/include/mcap/errors.hpp index 74c8eef436..032b9d622e 100644 --- a/cpp/mcap/include/mcap/errors.hpp +++ b/cpp/mcap/include/mcap/errors.hpp @@ -1,69 +1,42 @@ #pragma once #include -#include namespace mcap { -enum class ErrorCode { +enum class StatusCode { Success = 0, NotOpen = 1, InvalidChannelId = 2, }; -} // namespace mcap - -namespace std { -// Register mcap::ErrorCode with the standard error code system -template <> -struct is_error_code_enum : true_type {}; -} // namespace std - -namespace mcap { - -namespace detail { - -// Define a custom error code category derived from std::error_category -class McapErrorCategory : public std::error_category { -public: - virtual const char* name() const noexcept override final { - return "McapError"; - } - - virtual std::string message(int c) const override final { - switch (static_cast(c)) { - case ErrorCode::Success: - return "success"; - case ErrorCode::NotOpen: - return "not open"; - case ErrorCode::InvalidChannelId: - return "invalid channel id"; +struct Status { + StatusCode code; + std::string message; + + Status() + : code(StatusCode::Success) {} + + Status(StatusCode code) + : code(code) { + switch (code) { + case StatusCode::Success: + break; + case StatusCode::NotOpen: + message = "not open"; + break; + case StatusCode::InvalidChannelId: + message = "invalid channel id"; + break; default: - return "unknown"; + message = "unknown"; + break; } } - virtual std::error_condition default_error_condition(int c) const noexcept override final { - switch (static_cast(c)) { - case ErrorCode::NotOpen: - return make_error_condition(std::errc::bad_file_descriptor); - case ErrorCode::InvalidChannelId: - return make_error_condition(std::errc::invalid_argument); - default: - return std::error_condition(c, *this); - } + bool ok() const { + return code == StatusCode::Success; } }; -} // namespace detail - -const detail::McapErrorCategory& McapErrorCategory() { - static detail::McapErrorCategory c; - return c; -} - -inline std::error_code make_error_code(ErrorCode e) { - return {int(e), McapErrorCategory()}; -} - } // namespace mcap diff --git a/cpp/mcap/include/mcap/mcap.hpp b/cpp/mcap/include/mcap/mcap.hpp index 1fce1c2913..42b2add2cb 100644 --- a/cpp/mcap/include/mcap/mcap.hpp +++ b/cpp/mcap/include/mcap/mcap.hpp @@ -3,6 +3,7 @@ #include "errors.hpp" #include #include +#include #include #include #include @@ -15,7 +16,8 @@ namespace mcap { constexpr char SpecVersion = '0'; constexpr char LibraryVersion[] = LIBRARY_VERSION; -constexpr char Magic[] = {char(137), 77, 67, 65, 80, SpecVersion, 13, 10}; // "\x89MCAP0\r\n" +constexpr uint8_t Magic[] = {137, 77, 67, 65, 80, SpecVersion, 13, 10}; // "\x89MCAP0\r\n" +constexpr uint64_t DefaultChunkSize = 1024 * 768; using ChannelId = uint16_t; using Timestamp = uint64_t; @@ -69,20 +71,21 @@ struct Message { mcap::Timestamp publishTime; mcap::Timestamp 
recordTime; uint64_t dataSize; - std::byte* data = nullptr; + const std::byte* data = nullptr; }; struct Chunk { uint64_t uncompressedSize; uint32_t uncompressedCrc; std::string compression; - mcap::ByteArray records; + uint64_t recordsSize; + const std::byte* records = nullptr; }; struct MessageIndex { mcap::ChannelId channelId; uint32_t count; - std::unordered_map records; + std::vector> records; }; struct ChunkIndex { @@ -102,7 +105,7 @@ struct Attachment { mcap::Timestamp recordTime; std::string contentType; uint64_t dataSize; - std::byte* data = nullptr; + const std::byte* data = nullptr; }; struct AttachmentIndex { @@ -111,6 +114,13 @@ struct AttachmentIndex { std::string name; std::string contentType; mcap::ByteOffset offset; + + AttachmentIndex(const Attachment& attachment, mcap::ByteOffset fileOffset) + : recordTime(attachment.recordTime) + , attachmentSize(attachment.dataSize) + , name(attachment.name) + , contentType(attachment.contentType) + , offset(fileOffset) {} }; struct Statistics { @@ -123,29 +133,89 @@ struct Statistics { struct UnknownRecord { uint8_t opcode; - mcap::ByteArray data; + uint64_t dataSize; + std::byte* data = nullptr; }; struct McapWriterOptions { - bool indexed; + bool chunked; + uint64_t chunkSize; std::string profile; std::string library; mcap::KeyValueMap metadata; McapWriterOptions(const std::string_view profile) - : indexed(false) + : chunked(true) + , chunkSize(DefaultChunkSize) , profile(profile) , library("libmcap " LIBRARY_VERSION) {} }; +struct IWritable { + virtual inline ~IWritable() = default; + + virtual void write(const std::byte* data, uint64_t size) = 0; + virtual void end() = 0; + virtual uint64_t size() const = 0; +}; + +struct IReadable { + virtual inline ~IReadable() = default; + + virtual uint64_t size() const = 0; + virtual uint64_t read(std::byte* output, uint64_t size) = 0; +}; + +/** + * @brief An in-memory IWritable implementation backed by a growable buffer. + */ +class BufferedWriter final : public IWritable { +public: + void write(const std::byte* data, uint64_t size) override; + uint64_t size() const override; + void end() override; + const std::byte* data() const; + +private: + std::vector buffer_; +}; + +/** + * @brief Implements the IWritable interface used by McapWriter by wrapping a + * std::ostream stream. + */ +class StreamWriter final : public IWritable { +public: + StreamWriter(std::ostream& stream); + ~StreamWriter() override = default; + + void write(const std::byte* data, uint64_t size) override; + void end() override; + uint64_t size() const override; + +private: + std::ostream& stream_; + uint64_t size_ = 0; +}; + class McapWriter final { public: ~McapWriter(); + /** + * @brief Open a new MCAP file for writing and write the header. + * + * @param writer An implementation of the IWritable interface. Output bytes + * will be written to this object. + * @param options Options for MCAP writing. `profile` is required. + */ + void open(mcap::IWritable& writer, const McapWriterOptions& options); + /** * @brief Open a new MCAP file for writing and write the header. * * @param stream Output stream to write to. + * @param options Options for MCAP writing. `profile` is required. */ void open(std::ostream& stream, const McapWriterOptions& options); @@ -169,7 +239,7 @@ class McapWriter final { * @param msg Message to add. * @return A non-zero error code on failure. 
*/ - std::error_code write(const mcap::Message& message); + mcap::Status write(const mcap::Message& message); /** * @brief Write an attachment to the output stream. @@ -177,25 +247,44 @@ class McapWriter final { * @param attachment Attachment to add. * @return A non-zero error code on failure. */ - std::error_code write(const mcap::Attachment& attachment); + mcap::Status write(const mcap::Attachment& attachment); private: - std::ostream* stream_ = nullptr; + uint64_t chunkSize_ = DefaultChunkSize; + mcap::IWritable* output_ = nullptr; + std::unique_ptr streamOutput_; std::vector channels_; - std::unordered_set writtenChannels_; - - void writeMagic(); - - void write(const mcap::Header& header); - void write(const mcap::Footer& footer); - void write(const mcap::ChannelInfo& info); - void write(const std::string_view str); - void write(OpCode value); - void write(uint16_t value); - void write(uint32_t value); - void write(uint64_t value); - void write(std::byte* data, uint64_t size); - void write(const KeyValueMap& map, uint32_t size = 0); + std::vector attachmentIndex_; + std::vector chunkIndex_; + Statistics statistics_{}; + mcap::BufferedWriter currentChunk_; + std::unordered_map currentMessageIndex_; + uint64_t currentChunkStart_ = std::numeric_limits::max(); + uint64_t currentChunkEnd_ = std::numeric_limits::min(); + + void writeChunk(mcap::IWritable& output, const mcap::BufferedWriter& chunkData); + + static void writeMagic(mcap::IWritable& output); + + static void write(mcap::IWritable& output, const mcap::Header& header); + static void write(mcap::IWritable& output, const mcap::Footer& footer); + static void write(mcap::IWritable& output, const mcap::ChannelInfo& info); + static void write(mcap::IWritable& output, const mcap::Message& message); + static void write(mcap::IWritable& output, const mcap::Attachment& attachment); + static void write(mcap::IWritable& output, const mcap::Chunk& chunk); + static void write(mcap::IWritable& output, const mcap::MessageIndex& index); + static void write(mcap::IWritable& output, const mcap::ChunkIndex& index); + static void write(mcap::IWritable& output, const mcap::AttachmentIndex& index); + static void write(mcap::IWritable& output, const mcap::Statistics& stats); + static void write(mcap::IWritable& output, const mcap::UnknownRecord& record); + + static void write(mcap::IWritable& output, const std::string_view str); + static void write(mcap::IWritable& output, OpCode value); + static void write(mcap::IWritable& output, uint16_t value); + static void write(mcap::IWritable& output, uint32_t value); + static void write(mcap::IWritable& output, uint64_t value); + static void write(mcap::IWritable& output, const std::byte* data, uint64_t size); + static void write(mcap::IWritable& output, const KeyValueMap& map, uint32_t size = 0); }; } // namespace mcap diff --git a/cpp/mcap/include/mcap/mcap.inl b/cpp/mcap/include/mcap/mcap.inl index fe320db32f..a4cd69d9b4 100644 --- a/cpp/mcap/include/mcap/mcap.inl +++ b/cpp/mcap/include/mcap/mcap.inl @@ -9,20 +9,61 @@ McapWriter::~McapWriter() { close(); } +void McapWriter::open(mcap::IWritable& writer, const McapWriterOptions& options) { + chunkSize_ = options.chunked ? 
options.chunkSize : 0; + output_ = &writer; + writeMagic(writer); + write(writer, Header{options.profile, options.library, options.metadata}); +} + void McapWriter::open(std::ostream& stream, const McapWriterOptions& options) { - stream_ = &stream; - writeMagic(); - write(Header{options.profile, options.library, options.metadata}); + streamOutput_ = std::make_unique(stream); + open(*streamOutput_, options); } void McapWriter::close() { - if (!stream_) { + if (!output_) { return; } - write(mcap::Footer{0, 0}); - writeMagic(); - stream_->flush(); - stream_ = nullptr; + auto& output = *output_; + + // Check if there is an open chunk that needs to be closed + if (currentChunk_.size() > 0) { + writeChunk(output, currentChunk_); + currentChunk_.end(); + } + + // Get the offset of the End Of File section + const auto indexOffset = output.size(); + + // Write all channel info records + for (const auto& channel : channels_) { + write(output, channel); + } + + // Write chunk index records + for (const auto& chunkIndexRecord : chunkIndex_) { + write(output, chunkIndexRecord); + } + + // Write attachment index records + for (const auto& attachmentIndexRecord : attachmentIndex_) { + write(output, attachmentIndexRecord); + } + + // Write the statistics record + write(output, statistics_); + + // Calculate the index CRC + const uint32_t indexCrc = 0; + + // Write the footer and trailing magic + write(output, mcap::Footer{indexOffset, indexCrc}); + writeMagic(output); + + output.end(); + output_ = nullptr; + streamOutput_.reset(); } void McapWriter::addChannel(mcap::ChannelInfo& info) { @@ -30,51 +71,72 @@ void McapWriter::addChannel(mcap::ChannelInfo& info) { channels_.push_back(info); } -std::error_code McapWriter::write(const mcap::Message& message) { - if (!stream_) { - return make_error_code(ErrorCode::NotOpen); +mcap::Status McapWriter::write(const mcap::Message& message) { + if (!output_) { + return StatusCode::NotOpen; } + auto& output = chunkSize_ > 0 ? 
currentChunk_ : *output_; + auto& channelMessageCounts = statistics_.channelMessageCounts; // Write out channel info if we have not yet done so - if (writtenChannels_.find(message.channelId) == writtenChannels_.end()) { + if (channelMessageCounts.find(message.channelId) == channelMessageCounts.end()) { const size_t index = message.channelId - 1; if (index >= channels_.size()) { - return make_error_code(ErrorCode::InvalidChannelId); + return StatusCode::InvalidChannelId; } - write(channels_[index]); - writtenChannels_.insert(message.channelId); + write(output, channels_[index]); + channelMessageCounts.emplace(message.channelId, 0); + ++statistics_.channelCount; } - const uint64_t recordSize = 2 + 4 + 8 + 8 + message.dataSize; + const uint64_t messageOffset = output.size(); + + // Write the message and update statistics + write(output, message); + ++statistics_.messageCount; + channelMessageCounts[message.channelId] += 1; + + if (chunkSize_ > 0) { + // Update the message index + auto& messageIndex = currentMessageIndex_[message.channelId]; + messageIndex.channelId = message.channelId; + ++messageIndex.count; + messageIndex.records.emplace_back(message.recordTime, messageOffset); - write(OpCode::Message); - write(recordSize); - write(message.channelId); - write(message.sequence); - write(message.publishTime); - write(message.recordTime); - write(message.data, message.dataSize); + // Update the chunk index start/end times + currentChunkStart_ = std::min(currentChunkStart_, message.recordTime); + currentChunkEnd_ = std::max(currentChunkEnd_, message.recordTime); + + // Check if the current chunk is ready to close + if (currentChunk_.size() >= chunkSize_) { + writeChunk(*output_, currentChunk_); + currentChunk_.end(); + } + } - return ErrorCode::Success; + return StatusCode::Success; } -std::error_code McapWriter::write(const mcap::Attachment& attachment) { - if (!stream_) { - return make_error_code(ErrorCode::NotOpen); +mcap::Status McapWriter::write(const mcap::Attachment& attachment) { + if (!output_) { + return StatusCode::NotOpen; } + auto& output = *output_; - const uint64_t recordSize = - 4 + attachment.name.size() + 8 + 4 + attachment.contentType.size() + attachment.dataSize; + // Check if we have an open chunk that needs to be closed + if (chunkSize_ > 0 && currentChunk_.size() > 0) { + writeChunk(output, currentChunk_); + currentChunk_.end(); + } + + const uint64_t fileOffset = output.size(); - write(OpCode::Attachment); - write(recordSize); - write(attachment.name); - write(attachment.recordTime); - write(attachment.contentType); - write(attachment.data, attachment.dataSize); + write(output, attachment); + ++statistics_.attachmentCount; + attachmentIndex_.emplace_back(attachment, fileOffset); - return ErrorCode::Success; + return StatusCode::Success; } // Private methods ///////////////////////////////////////////////////////////// @@ -91,78 +153,269 @@ uint32_t KeyValueMapSize(const KeyValueMap& map) { } // namespace internal -void McapWriter::writeMagic() { - stream_->write(Magic, sizeof(Magic)); +void McapWriter::writeChunk(mcap::IWritable& output, const mcap::BufferedWriter& chunkData) { + uint64_t uncompressedSize = chunkData.size(); + uint32_t uncompressedCrc = 0; + std::string compression = ""; + uint64_t recordsSize = uncompressedSize; + const std::byte* records = chunkData.data(); + + // Write the chunk + const uint64_t chunkOffset = output.size(); + write(output, Chunk{uncompressedSize, uncompressedCrc, compression, recordsSize, records}); + const uint64_t chunkSize = 
output.size() - chunkOffset; + ++statistics_.chunkCount; + + // Create a chunk index record + auto& chunkIndexRecord = chunkIndex_.emplace_back(); + + // Write the message index records + const uint64_t messageIndexOffset = output.size(); + for (const auto& [channelId, messageIndex] : currentMessageIndex_) { + chunkIndexRecord.messageIndexOffsets.emplace(channelId, output.size()); + write(output, messageIndex); + } + currentMessageIndex_.clear(); + const uint64_t messageIndexLength = output.size() - messageIndexOffset; + + chunkIndexRecord.startTime = currentChunkStart_; + chunkIndexRecord.endTime = currentChunkEnd_; + chunkIndexRecord.chunkOffset = chunkOffset; + chunkIndexRecord.messageIndexLength = messageIndexLength; + chunkIndexRecord.compression = compression; + chunkIndexRecord.compressedSize = recordsSize; + chunkIndexRecord.uncompressedSized = uncompressedSize; + chunkIndexRecord.crc = 0; + + // Reset start/end times for the next chunk + currentChunkStart_ = std::numeric_limits::max(); + currentChunkEnd_ = std::numeric_limits::min(); +} + +void McapWriter::writeMagic(mcap::IWritable& output) { + write(output, reinterpret_cast(Magic), sizeof(Magic)); } -void McapWriter::write(const mcap::Header& header) { +void McapWriter::write(mcap::IWritable& output, const mcap::Header& header) { const uint32_t metadataSize = internal::KeyValueMapSize(header.metadata); const uint64_t recordSize = 4 + header.profile.size() + 4 + header.library.size() + 4 + metadataSize; - write(OpCode::Header); - write(recordSize); - write(header.profile); - write(header.library); - write(header.metadata, metadataSize); + write(output, OpCode::Header); + write(output, recordSize); + write(output, header.profile); + write(output, header.library); + write(output, header.metadata, metadataSize); } -void McapWriter::write(const mcap::Footer& footer) { - write(OpCode::Footer); - write(uint64_t(12)); - write(footer.indexOffset); - write(footer.indexCrc); +void McapWriter::write(mcap::IWritable& output, const mcap::Footer& footer) { + write(output, OpCode::Footer); + write(output, uint64_t(12)); + write(output, footer.indexOffset); + write(output, footer.indexCrc); } -void McapWriter::write(const mcap::ChannelInfo& info) { +void McapWriter::write(mcap::IWritable& output, const mcap::ChannelInfo& info) { const uint32_t userDataSize = internal::KeyValueMapSize(info.userData); const uint64_t recordSize = 2 + 4 + info.topicName.size() + 4 + info.encoding.size() + 4 + info.schemaName.size() + 4 + info.schema.size() + 4 + userDataSize + 4; const uint32_t crc = 0; - write(OpCode::ChannelInfo); - write(recordSize); - write(info.channelId); - write(info.topicName); - write(info.encoding); - write(info.schemaName); - write(info.schema); - write(info.userData, userDataSize); - write(crc); + write(output, OpCode::ChannelInfo); + write(output, recordSize); + write(output, info.channelId); + write(output, info.topicName); + write(output, info.encoding); + write(output, info.schemaName); + write(output, info.schema); + write(output, info.userData, userDataSize); + write(output, crc); +} + +void McapWriter::write(mcap::IWritable& output, const mcap::Message& message) { + const uint64_t recordSize = 2 + 4 + 8 + 8 + message.dataSize; + + write(output, OpCode::Message); + write(output, recordSize); + write(output, message.channelId); + write(output, message.sequence); + write(output, message.publishTime); + write(output, message.recordTime); + write(output, message.data, message.dataSize); +} + +void McapWriter::write(mcap::IWritable& 
output, const mcap::Attachment& attachment) { + const uint64_t recordSize = + 4 + attachment.name.size() + 8 + 4 + attachment.contentType.size() + attachment.dataSize; + + write(output, OpCode::Attachment); + write(output, recordSize); + write(output, attachment.name); + write(output, attachment.recordTime); + write(output, attachment.contentType); + write(output, attachment.data, attachment.dataSize); +} + +void McapWriter::write(mcap::IWritable& output, const mcap::Chunk& chunk) { + const uint64_t recordSize = 8 + 4 + 4 + chunk.compression.size() + chunk.recordsSize; + + write(output, OpCode::Chunk); + write(output, recordSize); + write(output, chunk.uncompressedSize); + write(output, chunk.uncompressedCrc); + write(output, chunk.compression); + write(output, chunk.records, chunk.recordsSize); +} + +void McapWriter::write(mcap::IWritable& output, const mcap::MessageIndex& index) { + const uint32_t recordsSize = index.records.size() * 16; + const uint64_t recordSize = 2 + 4 + 4 + recordsSize + 4; + const uint32_t crc = 0; + + write(output, OpCode::MessageIndex); + write(output, recordSize); + write(output, index.channelId); + write(output, index.count); + + write(output, recordsSize); + for (const auto& [timestamp, offset] : index.records) { + write(output, timestamp); + write(output, offset); + } + + write(output, crc); +} + +void McapWriter::write(mcap::IWritable& output, const mcap::ChunkIndex& index) { + const uint32_t messageIndexOffsetsSize = index.messageIndexOffsets.size() * 10; + const uint64_t recordSize = + 8 + 8 + 8 + 4 + messageIndexOffsetsSize + 8 + 4 + index.compression.size() + 8 + 8 + 4; + const uint32_t crc = 0; + + write(output, OpCode::ChunkIndex); + write(output, recordSize); + write(output, index.startTime); + write(output, index.endTime); + write(output, index.chunkOffset); + + write(output, messageIndexOffsetsSize); + for (const auto& [channelId, offset] : index.messageIndexOffsets) { + write(output, channelId); + write(output, offset); + } + + write(output, index.messageIndexLength); + write(output, index.compression); + write(output, index.compressedSize); + write(output, index.uncompressedSized); + write(output, crc); +} + +void McapWriter::write(mcap::IWritable& output, const mcap::AttachmentIndex& index) { + const uint64_t recordSize = 8 + 8 + 4 + index.name.size() + 4 + index.contentType.size() + 8; + + write(output, OpCode::AttachmentIndex); + write(output, recordSize); + write(output, index.recordTime); + write(output, index.attachmentSize); + write(output, index.name); + write(output, index.contentType); + write(output, index.offset); +} + +void McapWriter::write(mcap::IWritable& output, const mcap::Statistics& stats) { + const uint32_t channelMessageCountsSize = stats.channelMessageCounts.size() * 10; + const uint64_t recordSize = 8 + 4 + 4 + 4 + 4 + channelMessageCountsSize; + + write(output, OpCode::Statistics); + write(output, recordSize); + write(output, stats.messageCount); + write(output, stats.channelCount); + write(output, stats.attachmentCount); + write(output, stats.chunkCount); + + write(output, channelMessageCountsSize); + for (const auto& [channelId, messageCount] : stats.channelMessageCounts) { + write(output, channelId); + write(output, messageCount); + } +} + +void McapWriter::write(mcap::IWritable& output, const mcap::UnknownRecord& record) { + write(output, mcap::OpCode(record.opcode)); + write(output, record.dataSize); + write(output, record.data, record.dataSize); } -void McapWriter::write(const std::string_view str) { - 
write(uint32_t(str.size())); - stream_->write(str.data(), str.size()); +void McapWriter::write(mcap::IWritable& output, const std::string_view str) { + write(output, uint32_t(str.size())); + output.write(reinterpret_cast(str.data()), str.size()); } -void McapWriter::write(OpCode value) { - stream_->write(reinterpret_cast(&value), sizeof(value)); +void McapWriter::write(mcap::IWritable& output, OpCode value) { + output.write(reinterpret_cast(&value), sizeof(value)); } -void McapWriter::write(uint16_t value) { - stream_->write(reinterpret_cast(&value), sizeof(value)); +void McapWriter::write(mcap::IWritable& output, uint16_t value) { + output.write(reinterpret_cast(&value), sizeof(value)); } -void McapWriter::write(uint32_t value) { - stream_->write(reinterpret_cast(&value), sizeof(value)); +void McapWriter::write(mcap::IWritable& output, uint32_t value) { + output.write(reinterpret_cast(&value), sizeof(value)); } -void McapWriter::write(uint64_t value) { - stream_->write(reinterpret_cast(&value), sizeof(value)); +void McapWriter::write(mcap::IWritable& output, uint64_t value) { + output.write(reinterpret_cast(&value), sizeof(value)); } -void McapWriter::write(std::byte* data, uint64_t size) { - stream_->write(reinterpret_cast(data), size); +void McapWriter::write(mcap::IWritable& output, const std::byte* data, uint64_t size) { + output.write(reinterpret_cast(data), size); } -void McapWriter::write(const KeyValueMap& map, uint32_t size) { - write(size > 0 ? size : internal::KeyValueMapSize(map)); +void McapWriter::write(mcap::IWritable& output, const KeyValueMap& map, uint32_t size) { + write(output, size > 0 ? size : internal::KeyValueMapSize(map)); for (const auto& [key, value] : map) { - write(key); - write(value); + write(output, key); + write(output, value); } } +// BufferedWriter ////////////////////////////////////////////////////////////// + +void BufferedWriter::write(const std::byte* data, uint64_t size) { + buffer_.insert(buffer_.end(), data, data + size); +} + +uint64_t BufferedWriter::size() const { + return buffer_.size(); +} + +void BufferedWriter::end() { + buffer_.clear(); +} + +const std::byte* BufferedWriter::data() const { + return buffer_.data(); +} + +// StreamWriter //////////////////////////////////////////////////////////////// + +StreamWriter::StreamWriter(std::ostream& stream) + : stream_(stream) + , size_(0) {} + +void StreamWriter::write(const std::byte* data, uint64_t size) { + stream_.write(reinterpret_cast(data), std::streamsize(size)); + size_ += size; +} + +void StreamWriter::end() { + stream_.flush(); +} + +uint64_t StreamWriter::size() const { + return size_; +} + } // namespace mcap From 9323678f81b3498766651f9c1567855855e3c42e Mon Sep 17 00:00:00 2001 From: John Hurliman Date: Sun, 23 Jan 2022 12:26:34 -0800 Subject: [PATCH 028/635] [C++] Add noChunking and noIndexing options (#54) --- cpp/bench/run.cpp | 62 ++++++++++++++- cpp/examples/bag2mcap.cpp | 1 - cpp/mcap/include/mcap/mcap.hpp | 15 +++- cpp/mcap/include/mcap/mcap.inl | 134 +++++++++++++++++++-------------- 4 files changed, 147 insertions(+), 65 deletions(-) diff --git a/cpp/bench/run.cpp b/cpp/bench/run.cpp index 9a04504389..44d3c58040 100644 --- a/cpp/bench/run.cpp +++ b/cpp/bench/run.cpp @@ -19,7 +19,7 @@ static void BM_McapWriterBufferedWriterUnchunked(benchmark::State& state) { // Create an unchunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.chunked = false; + options.noChunking = true; // Open an output memory buffer and 
write the file header mcap::BufferedWriter out{}; @@ -61,7 +61,51 @@ static void BM_McapWriterBufferedWriterChunked(benchmark::State& state) { // Create a chunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.chunked = true; + options.noChunking = false; + options.chunkSize = uint64_t(state.range(1)); + + // Open an output memory buffer and write the file header + mcap::BufferedWriter out{}; + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + const auto iterations = size_t(state.range(0)); + + while (state.KeepRunning()) { + for (size_t i = 0; i < iterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file to memory + writer.close(); +} + +static void BM_McapWriterBufferedWriterChunkedUnindexed(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create a chunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.noChunking = false; + options.noIndexing = true; options.chunkSize = uint64_t(state.range(1)); // Open an output memory buffer and write the file header @@ -104,7 +148,7 @@ static void BM_McapWriterStreamWriterUnchunked(benchmark::State& state) { // Create an unchunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.chunked = false; + options.noChunking = true; // Open an output file stream and write the file header std::ofstream out("benchmark.mcap", std::ios::binary); @@ -147,7 +191,7 @@ static void BM_McapWriterStreamWriterChunked(benchmark::State& state) { // Create a chunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.chunked = true; + options.noChunking = false; options.chunkSize = uint64_t(state.range(1)); // Open an output file stream and write the file header @@ -195,6 +239,16 @@ int main(int argc, char* argv[]) { ->Args({10000, 100000}) ->Args({10000, 1000000}) ->Args({10000, 10000000}); + benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterChunkedUnindexed", + BM_McapWriterBufferedWriterChunkedUnindexed) + ->Args({10000, 1}) + ->Args({10000, 10}) + ->Args({10000, 100}) + ->Args({10000, 1000}) + ->Args({10000, 10000}) + ->Args({10000, 100000}) + ->Args({10000, 1000000}) + ->Args({10000, 10000000}); benchmark::RegisterBenchmark("BM_McapWriterStreamWriterUnchunked", BM_McapWriterStreamWriterUnchunked) ->Arg(10000); diff --git a/cpp/examples/bag2mcap.cpp b/cpp/examples/bag2mcap.cpp index 07ac3a053e..c38af3b0d9 100644 --- a/cpp/examples/bag2mcap.cpp +++ b/cpp/examples/bag2mcap.cpp @@ -17,7 +17,6 @@ int main() { mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.chunked = true; // set this to false to write an unchunked file std::ofstream out("output.mcap", std::ios::binary); writer.open(out, options); diff --git a/cpp/mcap/include/mcap/mcap.hpp b/cpp/mcap/include/mcap/mcap.hpp index 42b2add2cb..e69c356847 100644 --- 
a/cpp/mcap/include/mcap/mcap.hpp +++ b/cpp/mcap/include/mcap/mcap.hpp @@ -25,6 +25,12 @@ using ByteOffset = uint64_t; using KeyValueMap = std::unordered_map; using ByteArray = std::vector; +enum struct Compression { + None, + Lz4, + Zstd, +}; + enum struct OpCode : uint8_t { Header = 0x01, Footer = 0x02, @@ -138,15 +144,19 @@ struct UnknownRecord { }; struct McapWriterOptions { - bool chunked; + bool noChunking; + bool noIndexing; uint64_t chunkSize; + Compression compression; std::string profile; std::string library; mcap::KeyValueMap metadata; McapWriterOptions(const std::string_view profile) - : chunked(true) + : noChunking(false) + , noIndexing(false) , chunkSize(DefaultChunkSize) + , compression(Compression::None) , profile(profile) , library("libmcap " LIBRARY_VERSION) {} }; @@ -261,6 +271,7 @@ class McapWriter final { std::unordered_map currentMessageIndex_; uint64_t currentChunkStart_ = std::numeric_limits::max(); uint64_t currentChunkEnd_ = std::numeric_limits::min(); + bool indexing_ = true; void writeChunk(mcap::IWritable& output, const mcap::BufferedWriter& chunkData); diff --git a/cpp/mcap/include/mcap/mcap.inl b/cpp/mcap/include/mcap/mcap.inl index a4cd69d9b4..b8a03a7adc 100644 --- a/cpp/mcap/include/mcap/mcap.inl +++ b/cpp/mcap/include/mcap/mcap.inl @@ -10,7 +10,8 @@ McapWriter::~McapWriter() { } void McapWriter::open(mcap::IWritable& writer, const McapWriterOptions& options) { - chunkSize_ = options.chunked ? options.chunkSize : 0; + chunkSize_ = options.noChunking ? 0 : options.chunkSize; + indexing_ = !options.noIndexing; output_ = &writer; writeMagic(writer); write(writer, Header{options.profile, options.library, options.metadata}); @@ -33,29 +34,33 @@ void McapWriter::close() { currentChunk_.end(); } - // Get the offset of the End Of File section - const auto indexOffset = output.size(); + uint64_t indexOffset = 0; + uint32_t indexCrc = 0; - // Write all channel info records - for (const auto& channel : channels_) { - write(output, channel); - } + if (indexing_) { + // Get the offset of the End Of File section + indexOffset = output.size(); - // Write chunk index records - for (const auto& chunkIndexRecord : chunkIndex_) { - write(output, chunkIndexRecord); - } + // Write all channel info records + for (const auto& channel : channels_) { + write(output, channel); + } - // Write attachment index records - for (const auto& attachmentIndexRecord : attachmentIndex_) { - write(output, attachmentIndexRecord); - } + // Write chunk index records + for (const auto& chunkIndexRecord : chunkIndex_) { + write(output, chunkIndexRecord); + } - // Write the statistics record - write(output, statistics_); + // Write attachment index records + for (const auto& attachmentIndexRecord : attachmentIndex_) { + write(output, attachmentIndexRecord); + } + + // Write the statistics record + write(output, statistics_); + } - // Calculate the index CRC - const uint32_t indexCrc = 0; + // TODO: Calculate the index CRC // Write the footer and trailing magic write(output, mcap::Footer{indexOffset, indexCrc}); @@ -92,21 +97,27 @@ mcap::Status McapWriter::write(const mcap::Message& message) { const uint64_t messageOffset = output.size(); - // Write the message and update statistics + // Write the message write(output, message); - ++statistics_.messageCount; - channelMessageCounts[message.channelId] += 1; - if (chunkSize_ > 0) { - // Update the message index - auto& messageIndex = currentMessageIndex_[message.channelId]; - messageIndex.channelId = message.channelId; - ++messageIndex.count; - 
messageIndex.records.emplace_back(message.recordTime, messageOffset); + // Update statistics + if (indexing_) { + ++statistics_.messageCount; + channelMessageCounts[message.channelId] += 1; + } - // Update the chunk index start/end times - currentChunkStart_ = std::min(currentChunkStart_, message.recordTime); - currentChunkEnd_ = std::max(currentChunkEnd_, message.recordTime); + if (chunkSize_ > 0) { + if (indexing_) { + // Update the message index + auto& messageIndex = currentMessageIndex_[message.channelId]; + messageIndex.channelId = message.channelId; + ++messageIndex.count; + messageIndex.records.emplace_back(message.recordTime, messageOffset); + + // Update the chunk index start/end times + currentChunkStart_ = std::min(currentChunkStart_, message.recordTime); + currentChunkEnd_ = std::max(currentChunkEnd_, message.recordTime); + } // Check if the current chunk is ready to close if (currentChunk_.size() >= chunkSize_) { @@ -125,7 +136,7 @@ mcap::Status McapWriter::write(const mcap::Attachment& attachment) { auto& output = *output_; // Check if we have an open chunk that needs to be closed - if (chunkSize_ > 0 && currentChunk_.size() > 0) { + if (currentChunk_.size() > 0) { writeChunk(output, currentChunk_); currentChunk_.end(); } @@ -133,8 +144,11 @@ mcap::Status McapWriter::write(const mcap::Attachment& attachment) { const uint64_t fileOffset = output.size(); write(output, attachment); - ++statistics_.attachmentCount; - attachmentIndex_.emplace_back(attachment, fileOffset); + + if (indexing_) { + ++statistics_.attachmentCount; + attachmentIndex_.emplace_back(attachment, fileOffset); + } return StatusCode::Success; } @@ -163,33 +177,37 @@ void McapWriter::writeChunk(mcap::IWritable& output, const mcap::BufferedWriter& // Write the chunk const uint64_t chunkOffset = output.size(); write(output, Chunk{uncompressedSize, uncompressedCrc, compression, recordsSize, records}); - const uint64_t chunkSize = output.size() - chunkOffset; - ++statistics_.chunkCount; - // Create a chunk index record - auto& chunkIndexRecord = chunkIndex_.emplace_back(); + if (indexing_) { + // Update statistics + const uint64_t chunkSize = output.size() - chunkOffset; + ++statistics_.chunkCount; - // Write the message index records - const uint64_t messageIndexOffset = output.size(); - for (const auto& [channelId, messageIndex] : currentMessageIndex_) { - chunkIndexRecord.messageIndexOffsets.emplace(channelId, output.size()); - write(output, messageIndex); + // Create a chunk index record + auto& chunkIndexRecord = chunkIndex_.emplace_back(); + + // Write the message index records + const uint64_t messageIndexOffset = output.size(); + for (const auto& [channelId, messageIndex] : currentMessageIndex_) { + chunkIndexRecord.messageIndexOffsets.emplace(channelId, output.size()); + write(output, messageIndex); + } + currentMessageIndex_.clear(); + const uint64_t messageIndexLength = output.size() - messageIndexOffset; + + chunkIndexRecord.startTime = currentChunkStart_; + chunkIndexRecord.endTime = currentChunkEnd_; + chunkIndexRecord.chunkOffset = chunkOffset; + chunkIndexRecord.messageIndexLength = messageIndexLength; + chunkIndexRecord.compression = compression; + chunkIndexRecord.compressedSize = recordsSize; + chunkIndexRecord.uncompressedSized = uncompressedSize; + chunkIndexRecord.crc = 0; + + // Reset start/end times for the next chunk + currentChunkStart_ = std::numeric_limits::max(); + currentChunkEnd_ = std::numeric_limits::min(); } - currentMessageIndex_.clear(); - const uint64_t messageIndexLength = 
output.size() - messageIndexOffset; - - chunkIndexRecord.startTime = currentChunkStart_; - chunkIndexRecord.endTime = currentChunkEnd_; - chunkIndexRecord.chunkOffset = chunkOffset; - chunkIndexRecord.messageIndexLength = messageIndexLength; - chunkIndexRecord.compression = compression; - chunkIndexRecord.compressedSize = recordsSize; - chunkIndexRecord.uncompressedSized = uncompressedSize; - chunkIndexRecord.crc = 0; - - // Reset start/end times for the next chunk - currentChunkStart_ = std::numeric_limits::max(); - currentChunkEnd_ = std::numeric_limits::min(); } void McapWriter::writeMagic(mcap::IWritable& output) { From f4d26c91b1059e446ab6cbc3709871a305e7585b Mon Sep 17 00:00:00 2001 From: Roman Shtylman Date: Mon, 24 Jan 2022 10:11:56 -0800 Subject: [PATCH 029/635] spec: remove crc from Channel Info, Message index, and Chunk Index (#55) Crc fields provide an additional layer of integrity checking on written data. Rather than add crc fields to every record, we provide crc fields in specific records to allow for opt-in crc checking. If you want to opt-into crc validation for your messages, write messages into chunks (uncompressed chunks are valid chunks). If you opt into index records, the footer contains a crc for all the written index data. Attachment records contain a crc because they cannot appear in a chunk. Fixes #43 --- docs/specification/README.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/specification/README.md b/docs/specification/README.md index fab1cdeeef..89c166c049 100644 --- a/docs/specification/README.md +++ b/docs/specification/README.md @@ -128,7 +128,6 @@ Identifies a stream of messages on a particular topic and includes information a | 4 + N | schema_name | String | Schema Name | std_msgs/Header | | 4 + N | schema | uint32 length-prefixed bytes | Schema | | | N | user_data | KeyValues | Metadata about this channel | used to encode protocol-specific details like callerid, latching, QoS profiles... Refer to [supported profiles][profiles]. | -| 4 | crc | uint32 | CRC32 checksum of preceding fields in the record (not including the record opcode and length prefix). A value of zero indicates that CRC validation should not be performed. | | #### Message (op=0x04) @@ -164,7 +163,6 @@ The Message Index record maps timestamps to message offsets. One message index r | 2 | channel_id | uint16 | Channel ID. | | 4 | count | uint32 | Number of records in the chunk, on this channel. | | N | records | KeyValues | Array of record_time and offset for each record. Offset is relative to the start of the uncompressed chunk data. | -| 4 | crc | uint32 | CRC32 checksum of preceding fields in the record (not including the record opcode and length prefix). A value of zero indicates that CRC validation should not be performed. | #### Chunk Index (op=0x07) @@ -180,7 +178,6 @@ The Chunk Index records form a coarse index of timestamps to chunk offsets, alon | 4 + N | compression | String | The compression used on this chunk. Refer to [supported compression formats][compression formats]. | | 8 | compressed_size | uint64 | The compressed size of the chunk. | | 8 | uncompressed_size | uint64 | The uncompressed size of the chunk. | -| 4 | crc | uint32 | CRC32 checksum of the preceding fields within the record (not including the record opcode and length prefix). A value of zero indicates that CRC validation should not be performed. 
| #### Attachment (op=0x08) From 83afa7706f7c23c74b011e057b024437338ab0f3 Mon Sep 17 00:00:00 2001 From: Roman Shtylman Date: Mon, 24 Jan 2022 13:08:27 -0800 Subject: [PATCH 030/635] Add an Array type to use where KeyValues is not appropriate (#58) Remove _count_ from MessageIndex Fixes: #45 Fixes: #46 --- docs/specification/README.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/specification/README.md b/docs/specification/README.md index 89c166c049..06074d3699 100644 --- a/docs/specification/README.md +++ b/docs/specification/README.md @@ -85,13 +85,19 @@ The section below uses the following data types and serialization choices. In al - **Timestamp**: uint64 nanoseconds since a user-understood epoch (i.e unix epoch, robot boot time, etc) - **String**: a uint32-prefixed UTF8 string -- **KeyValues**: A uint32 length-prefixed association of key-value pairs, serialized as +- **KeyValues**: A uint32 length-prefixed association of key-value pairs ``` ``` -An empty KeyValues consists of a zero-value length prefix. +- **Array**: A uint32 length-prefixed array. + +``` + +``` + +An empty Array consists of a zero-value length prefix. - **Bytes**: refers to an array of bytes, without a length prefix. If a length prefix is required a designation like "uint32 length-prefixed bytes" will be used. @@ -161,8 +167,7 @@ The Message Index record maps timestamps to message offsets. One message index r | Bytes | Name | Type | Description | | --- | --- | --- | --- | | 2 | channel_id | uint16 | Channel ID. | -| 4 | count | uint32 | Number of records in the chunk, on this channel. | -| N | records | KeyValues | Array of record_time and offset for each record. Offset is relative to the start of the uncompressed chunk data. | +| N | records | Array<{ Timestamp, uint64 }> | Array of record_time and offset for each record. Offset is relative to the start of the uncompressed chunk data. | #### Chunk Index (op=0x07) From 4d0597857a2ff929519887cecb44a24ce97a126b Mon Sep 17 00:00:00 2001 From: Wyatt Alt Date: Mon, 24 Jan 2022 14:49:33 -0800 Subject: [PATCH 031/635] Consolidate chunked and unchunked record types (#61) Consolidates the chunked and unchunked docs in the specification, and enables more flexible opt-in support for various record types. Importantly, chunks can be used without message indexes, and unchunked writers may omit all indexing if desired. --- docs/specification/README.md | 43 ++++++------------- docs/specification/notes/explanatory-notes.md | 2 +- 2 files changed, 14 insertions(+), 31 deletions(-) diff --git a/docs/specification/README.md b/docs/specification/README.md index 06074d3699..5fd4c3abe2 100644 --- a/docs/specification/README.md +++ b/docs/specification/README.md @@ -33,9 +33,10 @@ Some helpful terms to understand in the following sections are: - **Message**: A type of record representing a timestamped message on a channel (and therefore associated with a topic/schema). A message can be parsed by a reader that has also read the channel info for the channel on which the message appears. - **Chunk**: A record type that wraps a compressed set of channel info and message records. - **Attachment**: Extra data that may be included in the file, outside the chunks. Attachments may be quickly listed and accessed via an index at the end of the file. -- **Index**: The format contains indexes for both messages and attachments. 
For messages, there are two levels of indexing - a **Chunk Index** at the end of the file points to chunks by offset, enabling fast location of chunks based on topic and timerange. A second index - the **Message Index** - after each chunk contains, for each channel in the chunk, and offset and timestamp for every message to allow fast location of messages within the uncompressed chunk data.
-  - The attachment index at the end of the file allows for fast listing and location of attachments based on name, timestamp, or attachment type.
+- **Index**: The format contains indexes for both messages and attachments. For messages, there are two levels of indexing - a **Chunk Index** at the end of the file points to chunks by offset, enabling fast location of chunks based on channel and timerange. A second index - the **Message Index** - after each chunk contains, for each channel in the chunk, an offset and timestamp for every message to allow fast location of messages within the uncompressed chunk data. The attachment index at the end of the file allows for fast listing and location of attachments based on name, timestamp, or attachment type.
+- **Statistics**: A type of record at the end of the file, used to support fast summarization of file contents.
+- **Message Data Section**: Used in this doc to refer to the first portion of the file that contains chunks and message data. To be distinguished from the **Index Data Section**.
+- **Index Data Section**: The last part of the file, containing records used for searching and summarizing the file. The Index Data section is split into a **channel info portion**, **chunk index portion**, and **attachment index portion**, each containing contiguous runs of the corresponding record type, followed by a **Statistics** record. All portions of the index data section are optional, subject to constraints and tradeoffs described below. There are no other record types in the index data section.
 
 ## Format Description
 
@@ -51,30 +52,14 @@ These are the magic bytes:
 
 The first record in every file must be a Header (op=0x01) and the last record must be a Footer (op=0x02).
 
-MCAP files may be **"chunked"** or **"unchunked"**. Chunked and unchunked files have different constraints on the layout of record types in the file. In chunked files, messages are grouped into optionally-compressed blocks of data before being written to disk. In an unchunked file, each message is written out uncompressed. See the diagrams below for clarity (the record types shown are described in the following section):
+MCAP files may contain a variety of record types. Specific constraints on valid usage of the record types are explained in the sections below, but in general record types may be used or not depending on the feature requirements of the consumer.
 
-#### Chunked
+The diagrams below show two possible variants - a file that is chunked and indexed, i.e. making full use of the features, and one that is unchunked but contains statistics.
 
 ![Chunked][diagram chunked]
 
-#### Unchunked
-
 ![Unchunked][diagram unchunked]
 
-Benefits of chunked files include:
-
-- Support for random access via time- and topic-based indexing.
-- Reduced storage requirements when recording or processing data.
-- Reduced bandwidth requirements when transferring over a network.
-- Possibly higher write performance if the cost of IO outweighs the cost of compression.
-
-Benefits of unchunked files include:
-
-- Higher write performance on CPU-constrained systems.
-- Less potential for data loss in case of a recording crash. No "to-be-compressed" buffer is dropped by the recorder -- though the protocol makes no specification on how the process syncs unchunked messages to disk.
-
-Unchunked files are less friendly to readers than chunked files due to their lack of an index and greater size. When unchunked files are in use, they may be converted to chunked files in post-processing to mitigate this.
-
 ### Record Types
 
 Record types are identified by single-byte **opcodes**. Record opcodes in the range 0x01-0x7F are reserved for future MCAP format usage. 0x80-0xFF are reserved for application extensions and user proposals.
@@ -124,7 +109,7 @@ A file without a footer is **corrupt**, indicating the writer process encountere
 
 #### Channel Info (op=0x03)
 
-Identifies a stream of messages on a particular topic and includes information about how the messages should be decoded by readers. A channel info record must occur in the file prior to any message that references its Channel ID. Channel IDs must uniquely identify a channel across the entire file.
+Identifies a stream of messages on a particular topic and includes information about how the messages should be decoded by readers. A channel info record must occur in the file prior to any message that references its Channel ID. Channel IDs must uniquely identify a channel across the entire file. If message indexing is in use, the channel info portion of the index data section must also be in use.
 
 | Bytes | Name | Type | Description | Example |
 | --- | --- | --- | --- | --- |
@@ -137,9 +122,7 @@ Identifies a stream of messages on a particular topic and includes information a
 
 #### Message (op=0x04)
 
-A message record encodes a single timestamped message on a particular channel. Message records may occur inside a Chunk, or outside the chunk in the case of an unchunked file. A chunked file may not have messages outside the chunks.
-
-Message records must be preceded by a Channel Info record for the given channel ID. That Channel Info record may appear inside the same chunk as the message, or in an earlier chunk in the file. In an unchunked file, both the channel info and message records will be outside chunks, as there will be no chunks.
+A message record encodes a single timestamped message on a particular channel. In a given file, messages must appear either inside Chunks, or outside Chunks. A file may not contain both chunked and unchunked messages.
 
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
@@ -151,7 +134,7 @@ Message records must be preceded by a Channel Info record for the given channel
 
 #### Chunk (op=0x05)
 
-A Chunk is a collection of compressed channel info and message records.
+A Chunk is a collection of compressed channel info and message records. If message indexing is in use, Chunks are required.
 
 | Bytes | Name | Type | Description | Example |
 | --- | --- | --- | --- | --- |
@@ -162,7 +145,7 @@ A Chunk is a collection of compressed channel info and message records.
 
 #### Message Index (op=0x06)
 
-The Message Index record maps timestamps to message offsets. One message index record is written for each channel in the preceding chunk. All message index records for a chunk must immediately follow the chunk.
+The Message Index record maps timestamps to message offsets. If message indexing is in use, following each chunk, a message index record is written for each channel in the preceding chunk. All message index records for a chunk must immediately follow the chunk in a contiguous run of records.
 
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
@@ -171,7 +154,7 @@ The Message Index record maps timestamps to message offsets. One message index r
 
 #### Chunk Index (op=0x07)
 
-The Chunk Index records form a coarse index of timestamps to chunk offsets, along with the locations of the message index records associatiated with those chunks.
+The Chunk Index records form a coarse index of timestamps to chunk offsets, along with the locations of the message index records associated with those chunks. They are found in the chunk index portion of the index data section. If message indexing is in use, Chunk Indexes are required. A Chunk Index record must be preceded in the index data section by Channel Info records for any channels that it references.
 
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
@@ -198,7 +181,7 @@ Attachments can be used to attach artifacts such as calibration data, text, or c
 
 #### Attachment Index (op=0x09)
 
-The attachment index is an index to named attachments within the file. One record is recorded per attachment in the file.
+The attachment index is an index to named attachments within the file. One record is recorded per attachment in the file. The attachment index records are written to the attachment index portion of the index data section.
 
 | Bytes | Name | Type | Description |
 | --- | --- | --- | --- |
@@ -210,7 +193,7 @@ The attachment index is an index to named attachments within the file. One recor
 
 #### Statistics (op=0x0A)
 
-The statistics record contains statistics about the recorded data. It is the last record in the file before the footer.
+The statistics record contains statistics about the recorded data. It is the last record in the file before the footer. The record must be preceded in the index data section by Channel Info records for any channels referenced in the `channel_message_counts` field. If this is undesirable but some statistics are still desired, the field may be set to a zero-length map. The statistics record is optional.
 
 | Bytes | Name | Type | Description |
diff --git a/docs/specification/notes/explanatory-notes.md b/docs/specification/notes/explanatory-notes.md
index b0bb47e374..613ff0eaee 100644
--- a/docs/specification/notes/explanatory-notes.md
+++ b/docs/specification/notes/explanatory-notes.md
@@ -4,7 +4,7 @@ The following notes may be useful for users of the MCAP format, including implem
 
 ## Feature Explanations
 
-The format is intended to support efficient, indexed reading of messages and generation of summary data in both local and remote contexts. "Seeking" should be imagined to incur either a disk seek or an HTTP range request to an object store -- the latter being significantly more costly.
+The format is intended to support efficient, indexed reading of messages and generation of summary data in both local and remote contexts. "Seeking" should be imagined to incur either a disk seek or an HTTP range request to an object store -- the latter being significantly more costly. In both random access and summarization, features may be unsupported due to choices taken by the writer of the file. For instance, statistics may not include channel message counts, or there may be no message index present. If the index data section is empty, the index_offset in the file footer will be set to zero.
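As a concrete illustration of the footer-driven lookup described above (a reader-side sketch, not part of the spec or this patch): per the Footer record written by the C++ writer in this series, the last bytes of a file are a uint64 index_offset, a uint32 index_crc, and 8 bytes of trailing magic. Assuming a little-endian host and omitting validation, a reader might locate the index data section like this:

```
#include <cstdint>
#include <fstream>
#include <iostream>

int main() {
  // Open at end-of-file to learn the total size.
  std::ifstream file("input.mcap", std::ios::binary | std::ios::ate);
  const std::streamoff fileSize = file.tellg();

  // index_offset sits 8 (magic) + 4 (index_crc) + 8 (itself) bytes from EOF.
  file.seekg(fileSize - 8 - 4 - 8);
  uint64_t indexOffset = 0;
  // Raw read assumes a little-endian host, matching the on-disk encoding.
  file.read(reinterpret_cast<char*>(&indexOffset), sizeof(indexOffset));

  if (indexOffset == 0) {
    std::cout << "no index data section; fall back to a linear scan\n";
  } else {
    std::cout << "index data section starts at byte " << indexOffset << "\n";
  }
  return 0;
}
```

A real reader would verify the leading and trailing magic bytes and handle short reads before trusting index_offset.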
### Scanning for records on specific topics within an interval From b3361ba19fffd20110d862a68afeac837d008973 Mon Sep 17 00:00:00 2001 From: John Hurliman Date: Mon, 24 Jan 2022 17:48:20 -0800 Subject: [PATCH 032/635] [C++] Add lz4 and zlib compression to McapWriter (#65) --- cpp/bench/run.cpp | 227 +++++++++++++++----- cpp/examples/bag2mcap.cpp | 11 +- cpp/mcap/conanfile.py | 2 +- cpp/mcap/include/mcap/errors.hpp | 4 + cpp/mcap/include/mcap/mcap.hpp | 125 +++++++++-- cpp/mcap/include/mcap/mcap.inl | 358 ++++++++++++++++++++++++++----- 6 files changed, 601 insertions(+), 126 deletions(-) diff --git a/cpp/bench/run.cpp b/cpp/bench/run.cpp index 44d3c58040..56ae78e1c6 100644 --- a/cpp/bench/run.cpp +++ b/cpp/bench/run.cpp @@ -8,8 +8,9 @@ #include constexpr char StringSchema[] = "string data"; +constexpr size_t WriteIterations = 10000; -static void BM_McapWriterBufferedWriterUnchunked(benchmark::State& state) { +static void BM_McapWriterBufferedWriterUnchunkedUnindexed(benchmark::State& state) { // Create a message payload std::array payload; const uint32_t length = 13; @@ -20,6 +21,7 @@ static void BM_McapWriterBufferedWriterUnchunked(benchmark::State& state) { mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); options.noChunking = true; + options.noIndexing = true; // Open an output memory buffer and write the file header mcap::BufferedWriter out{}; @@ -38,10 +40,48 @@ static void BM_McapWriterBufferedWriterUnchunked(benchmark::State& state) { msg.data = payload.data(); msg.dataSize = payload.size(); - const auto iterations = size_t(state.range(0)); + while (state.KeepRunning()) { + for (size_t i = 0; i < WriteIterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file to memory + writer.close(); +} + +static void BM_McapWriterBufferedWriterUnchunked(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create an unchunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.noChunking = true; + + // Open an output memory buffer and write the file header + mcap::BufferedWriter out{}; + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); while (state.KeepRunning()) { - for (size_t i = 0; i < iterations; i++) { + for (size_t i = 0; i < WriteIterations; i++) { writer.write(msg); benchmark::ClobberMemory(); } @@ -61,8 +101,7 @@ static void BM_McapWriterBufferedWriterChunked(benchmark::State& state) { // Create a chunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.noChunking = false; - options.chunkSize = uint64_t(state.range(1)); + options.chunkSize = uint64_t(state.range(0)); // Open an output memory buffer and write the file header mcap::BufferedWriter out{}; @@ -81,10 +120,8 @@ static void BM_McapWriterBufferedWriterChunked(benchmark::State& state) { msg.data = payload.data(); msg.dataSize = payload.size(); - const auto iterations = size_t(state.range(0)); - while (state.KeepRunning()) { - for 
(size_t i = 0; i < iterations; i++) { + for (size_t i = 0; i < WriteIterations; i++) { writer.write(msg); benchmark::ClobberMemory(); } @@ -104,9 +141,50 @@ static void BM_McapWriterBufferedWriterChunkedUnindexed(benchmark::State& state) // Create a chunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.noChunking = false; options.noIndexing = true; - options.chunkSize = uint64_t(state.range(1)); + options.chunkSize = uint64_t(state.range(0)); + + // Open an output memory buffer and write the file header + mcap::BufferedWriter out{}; + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); + + while (state.KeepRunning()) { + for (size_t i = 0; i < WriteIterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file to memory + writer.close(); +} + +static void BM_McapWriterBufferedWriterLZ4(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create a chunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunkSize = uint64_t(state.range(0)); + options.compression = mcap::Compression::Lz4; + options.compressionLevel = mcap::CompressionLevel(state.range(1)); // Open an output memory buffer and write the file header mcap::BufferedWriter out{}; @@ -125,10 +203,50 @@ static void BM_McapWriterBufferedWriterChunkedUnindexed(benchmark::State& state) msg.data = payload.data(); msg.dataSize = payload.size(); - const auto iterations = size_t(state.range(0)); + while (state.KeepRunning()) { + for (size_t i = 0; i < WriteIterations; i++) { + writer.write(msg); + benchmark::ClobberMemory(); + } + } + + // Finish writing the file to memory + writer.close(); +} + +static void BM_McapWriterBufferedWriterZStd(benchmark::State& state) { + // Create a message payload + std::array payload; + const uint32_t length = 13; + std::memcpy(payload.data(), &length, 4); + std::memcpy(payload.data() + 4, "Hello, world!", 13); + + // Create a chunked writer using the ros1 profile + mcap::McapWriter writer; + auto options = mcap::McapWriterOptions("ros1"); + options.chunkSize = uint64_t(state.range(0)); + options.compression = mcap::Compression::Zstd; + options.compressionLevel = mcap::CompressionLevel(state.range(1)); + + // Open an output memory buffer and write the file header + mcap::BufferedWriter out{}; + writer.open(out, options); + + // Register a Channel Info record + mcap::ChannelInfo topic("/chatter", "ros1", "std_msgs/String", StringSchema); + writer.addChannel(topic); + + // Create a message + mcap::Message msg; + msg.channelId = topic.channelId; + msg.sequence = 0; + msg.publishTime = 0; + msg.recordTime = msg.publishTime; + msg.data = payload.data(); + msg.dataSize = payload.size(); while (state.KeepRunning()) { - for (size_t i = 0; i < iterations; i++) { + for (size_t i = 0; i < WriteIterations; i++) { writer.write(msg); benchmark::ClobberMemory(); } @@ -167,10 +285,8 @@ static void BM_McapWriterStreamWriterUnchunked(benchmark::State& 
state) { msg.data = payload.data(); msg.dataSize = payload.size(); - const auto iterations = size_t(state.range(0)); - while (state.KeepRunning()) { - for (size_t i = 0; i < iterations; i++) { + for (size_t i = 0; i < WriteIterations; i++) { writer.write(msg); benchmark::ClobberMemory(); } @@ -191,8 +307,7 @@ static void BM_McapWriterStreamWriterChunked(benchmark::State& state) { // Create a chunked writer using the ros1 profile mcap::McapWriter writer; auto options = mcap::McapWriterOptions("ros1"); - options.noChunking = false; - options.chunkSize = uint64_t(state.range(1)); + options.chunkSize = uint64_t(state.range(0)); // Open an output file stream and write the file header std::ofstream out("benchmark.mcap", std::ios::binary); @@ -211,10 +326,8 @@ static void BM_McapWriterStreamWriterChunked(benchmark::State& state) { msg.data = payload.data(); msg.dataSize = payload.size(); - const auto iterations = size_t(state.range(0)); - while (state.KeepRunning()) { - for (size_t i = 0; i < iterations; i++) { + for (size_t i = 0; i < WriteIterations; i++) { writer.write(msg); benchmark::ClobberMemory(); } @@ -226,41 +339,59 @@ static void BM_McapWriterStreamWriterChunked(benchmark::State& state) { } int main(int argc, char* argv[]) { + benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterUnchunkedUnindexed", + BM_McapWriterBufferedWriterUnchunkedUnindexed); benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterUnchunked", - BM_McapWriterBufferedWriterUnchunked) - ->Arg(10000); + BM_McapWriterBufferedWriterUnchunked); benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterChunked", BM_McapWriterBufferedWriterChunked) - ->Args({10000, 1}) - ->Args({10000, 10}) - ->Args({10000, 100}) - ->Args({10000, 1000}) - ->Args({10000, 10000}) - ->Args({10000, 100000}) - ->Args({10000, 1000000}) - ->Args({10000, 10000000}); + ->Arg(1) + ->Arg(10) + ->Arg(100) + ->Arg(1000) + ->Arg(10000) + ->Arg(100000) + ->Arg(1000000) + ->Arg(10000000); benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterChunkedUnindexed", BM_McapWriterBufferedWriterChunkedUnindexed) - ->Args({10000, 1}) - ->Args({10000, 10}) - ->Args({10000, 100}) - ->Args({10000, 1000}) - ->Args({10000, 10000}) - ->Args({10000, 100000}) - ->Args({10000, 1000000}) - ->Args({10000, 10000000}); + ->Arg(1) + ->Arg(10) + ->Arg(100) + ->Arg(1000) + ->Arg(10000) + ->Arg(100000) + ->Arg(1000000) + ->Arg(10000000); + benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterLZ4", BM_McapWriterBufferedWriterLZ4) + ->Args({1, 0}) + ->Args({1, 1}) + ->Args({1, 2}) + ->Args({100000, 0}) + ->Args({100000, 1}) + ->Args({100000, 2}); + benchmark::RegisterBenchmark("BM_McapWriterBufferedWriterZStd", BM_McapWriterBufferedWriterZStd) + ->Args({1, 0}) + ->Args({1, 1}) + ->Args({1, 2}) + ->Args({1, 3}) + ->Args({1, 4}) + ->Args({100000, 0}) + ->Args({100000, 1}) + ->Args({100000, 2}) + ->Args({100000, 3}) + ->Args({100000, 4}); benchmark::RegisterBenchmark("BM_McapWriterStreamWriterUnchunked", - BM_McapWriterStreamWriterUnchunked) - ->Arg(10000); + BM_McapWriterStreamWriterUnchunked); benchmark::RegisterBenchmark("BM_McapWriterStreamWriterChunked", BM_McapWriterStreamWriterChunked) - ->Args({10000, 1}) - ->Args({10000, 10}) - ->Args({10000, 100}) - ->Args({10000, 1000}) - ->Args({10000, 10000}) - ->Args({10000, 100000}) - ->Args({10000, 1000000}) - ->Args({10000, 10000000}); + ->Arg(1) + ->Arg(10) + ->Arg(100) + ->Arg(1000) + ->Arg(10000) + ->Arg(100000) + ->Arg(1000000) + ->Arg(10000000); benchmark::Initialize(&argc, argv); benchmark::RunSpecifiedBenchmarks(); 
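For context on the compression paths benchmarked above: the new LZ4Writer and ZStdWriter buffer chunk records and compress them when a chunk is closed. The standalone sketch below (illustrative only, not the library's actual implementation) shows the kind of one-shot lz4/zstd calls involved. Note the asymmetry in tuning knobs: LZ4_compress_fast takes an acceleration factor (higher is faster but compresses less, matching the acceleration_ member declared on LZ4Writer), while zstd takes a conventional compression level.

```
#include <lz4.h>
#include <zstd.h>

#include <cstddef>
#include <vector>

// One-shot compression of an uncompressed chunk body. Error handling is
// reduced to returning an empty buffer; real code would surface a Status.
std::vector<std::byte> compressChunk(const std::byte* src, size_t srcSize,
                                     bool useZstd, int levelOrAcceleration) {
  std::vector<std::byte> dst;
  if (useZstd) {
    // Reserve the worst-case output size, compress, then shrink to fit.
    dst.resize(ZSTD_compressBound(srcSize));
    const size_t written =
      ZSTD_compress(dst.data(), dst.size(), src, srcSize, levelOrAcceleration);
    if (ZSTD_isError(written)) {
      return {};
    }
    dst.resize(written);
  } else {
    dst.resize(size_t(LZ4_compressBound(int(srcSize))));
    const int written = LZ4_compress_fast(
      reinterpret_cast<const char*>(src), reinterpret_cast<char*>(dst.data()),
      int(srcSize), int(dst.size()), levelOrAcceleration);
    if (written <= 0) {
      return {};
    }
    dst.resize(size_t(written));
  }
  return dst;
}
```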
diff --git a/cpp/examples/bag2mcap.cpp b/cpp/examples/bag2mcap.cpp
index c38af3b0d9..5b897b2306 100644
--- a/cpp/examples/bag2mcap.cpp
+++ b/cpp/examples/bag2mcap.cpp
@@ -17,6 +17,7 @@ int main() {
   mcap::McapWriter writer;
   auto options = mcap::McapWriterOptions("ros1");
+  options.compression = mcap::Compression::Zstd;
 
   std::ofstream out("output.mcap", std::ios::binary);
   writer.open(out, options);
@@ -37,7 +38,15 @@ int main() {
   msg.data = payload.data();
   msg.dataSize = payload.size();
 
-  writer.write(msg);
+  const auto res = writer.write(msg);
+  if (!res.ok()) {
+    std::cerr << "Failed to write message: " << res.message << "\n";
+    writer.terminate();
+    out.close();
+    std::remove("output.mcap");
+    return 1;
+  }
+
   writer.close();
 
   return 0;
diff --git a/cpp/mcap/conanfile.py b/cpp/mcap/conanfile.py
index cf930f511c..914f59272f 100644
--- a/cpp/mcap/conanfile.py
+++ b/cpp/mcap/conanfile.py
@@ -11,7 +11,7 @@ class McapConan(ConanFile):
     topics = ("mcap", "serialization", "deserialization", "recording")
 
     settings = ("os", "compiler", "build_type", "arch")
-    requires = ("zlib/1.2.11", "zstd/1.5.1")
+    requires = ("lz4/1.9.3", "zstd/1.5.1")
     generators = "cmake"
 
     def validate(self):
diff --git a/cpp/mcap/include/mcap/errors.hpp b/cpp/mcap/include/mcap/errors.hpp
index 032b9d622e..831b208b25 100644
--- a/cpp/mcap/include/mcap/errors.hpp
+++ b/cpp/mcap/include/mcap/errors.hpp
@@ -34,6 +34,10 @@ struct Status {
     }
   }
 
+  Status(StatusCode code, const std::string& message)
+    : code(code)
+    , message(message) {}
+
   bool ok() const {
     return code == StatusCode::Success;
   }
diff --git a/cpp/mcap/include/mcap/mcap.hpp b/cpp/mcap/include/mcap/mcap.hpp
index e69c356847..526de74491 100644
--- a/cpp/mcap/include/mcap/mcap.hpp
+++ b/cpp/mcap/include/mcap/mcap.hpp
@@ -1,8 +1,10 @@
 #pragma once
 
 #include "errors.hpp"
+#include <lz4.h>
 #include <cstddef>
 #include <cstdint>
+#include <iostream>
 #include <memory>
 #include <string>
 #include <string_view>
@@ -10,6 +12,10 @@
 #include <unordered_map>
 #include <vector>
 
+#define ZSTD_STATIC_LINKING_ONLY
+#include <zstd.h>
+#include <zstd_errors.h>
+
 namespace mcap {
 
 #define LIBRARY_VERSION "0.0.1"
@@ -31,6 +37,14 @@ enum struct Compression {
   Zstd,
 };
 
+enum struct CompressionLevel {
+  Fastest,
+  Fast,
+  Default,
+  Slow,
+  Slowest,
+};
+
 enum struct OpCode : uint8_t {
   Header = 0x01,
   Footer = 0x02,
@@ -148,6 +162,7 @@ struct McapWriterOptions {
   bool noIndexing;
   uint64_t chunkSize;
   Compression compression;
+  CompressionLevel compressionLevel;
   std::string profile;
   std::string library;
   mcap::KeyValueMap metadata;
@@ -157,6 +172,7 @@ struct McapWriterOptions {
     , noIndexing(false)
     , chunkSize(DefaultChunkSize)
     , compression(Compression::None)
+    , compressionLevel(CompressionLevel::Default)
     , profile(profile)
     , library("libmcap " LIBRARY_VERSION) {}
 };
@@ -176,15 +192,29 @@ struct IReadable {
   virtual uint64_t read(std::byte* output, uint64_t size) = 0;
 };
 
+struct IChunkWriter {
+  virtual inline ~IChunkWriter() = default;
+
+  virtual void write(const std::byte* data, uint64_t size) = 0;
+  virtual void end() = 0;
+  virtual uint64_t size() const = 0;
+  virtual bool empty() const = 0;
+  virtual void clear() = 0;
+  virtual const std::byte* data() const = 0;
+};
+
 /**
- * @brief An in-memory IWritable implementation backed by a growable buffer.
+ * @brief An in-memory IWritable/IChunkWriter implementation backed by a
+ * growable buffer.
 */
-class BufferedWriter final : public IWritable {
+class BufferedWriter final : public IWritable, public IChunkWriter {
 public:
   void write(const std::byte* data, uint64_t size) override;
-  uint64_t size() const override;
   void end() override;
-  const std::byte* data() const;
+  uint64_t size() const override;
+  bool empty() const override;
+  void clear() override;
+  const std::byte* data() const override;
 
 private:
   std::vector<std::byte> buffer_;
@@ -208,6 +238,50 @@ class StreamWriter final : public IWritable {
   uint64_t size_ = 0;
 };
 
+/**
+ * @brief An in-memory IWritable/IChunkWriter implementation that holds data in
+ * a temporary buffer before flushing to an LZ4-compressed buffer.
+ */
+class LZ4Writer final : public IWritable, public IChunkWriter {
+public:
+  LZ4Writer(CompressionLevel compressionLevel, uint64_t chunkSize);
+  ~LZ4Writer() override = default;
+
+  void write(const std::byte* data, uint64_t size) override;
+  void end() override;
+  uint64_t size() const override;
+  bool empty() const override;
+  void clear() override;
+  const std::byte* data() const override;
+
+private:
+  std::vector<std::byte> preEndBuffer_;
+  std::vector<std::byte> buffer_;
+  int acceleration_ = 1;
+};
+
+/**
+ * @brief An in-memory IWritable/IChunkWriter implementation that holds data in
+ * a temporary buffer before flushing to a ZStandard-compressed buffer.
+ */
+class ZStdWriter final : public IWritable, public IChunkWriter {
+public:
+  ZStdWriter(CompressionLevel compressionLevel, uint64_t chunkSize);
+  ~ZStdWriter() override;
+
+  void write(const std::byte* data, uint64_t size) override;
+  void end() override;
+  uint64_t size() const override;
+  bool empty() const override;
+  void clear() override;
+  const std::byte* data() const override;
+
+private:
+  std::vector<std::byte> preEndBuffer_;
+  std::vector<std::byte> buffer_;
+  ZSTD_CCtx* zstdContext_ = nullptr;
+};
+
 class McapWriter final {
 public:
   ~McapWriter();
@@ -230,10 +304,18 @@ class McapWriter final {
   void open(std::ostream& stream, const McapWriterOptions& options);
 
   /**
-   * @brief Write the MCAP footer and close the output stream.
+   * @brief Write the MCAP footer, flush pending writes to the output stream,
+   * and reset internal state.
    */
   void close();
 
+  /**
+   * @brief Reset internal state without writing the MCAP footer or flushing
+   * pending writes. This should only be used in error cases as the output MCAP
+   * file will be truncated.
+   */
+  void terminate();
+
   /**
   * @brief Add channel info and set `info.channelId` to a generated channel id.
   * The channel id is used when adding messages.
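The `IChunkWriter` interface introduced above is the seam that lets `McapWriter` treat uncompressed and compressed chunk buffers uniformly; `BufferedWriter`, `LZ4Writer`, and `ZStdWriter` all implement it. As a reference for the contract, here is a hypothetical pass-through implementation (not part of the patch; the class name is illustrative), behaviorally equivalent to `BufferedWriter`:

```cpp
// Sketch of a conforming IChunkWriter: bytes accumulate in a growable buffer,
// end() is a no-op because nothing is compressed, and clear() discards the
// buffer after McapWriter has copied the chunk out via data()/size().
class PassthroughChunkWriter final : public mcap::IChunkWriter {
public:
  void write(const std::byte* data, uint64_t size) override {
    buffer_.insert(buffer_.end(), data, data + size);
  }
  void end() override {}  // no compression stream to flush
  uint64_t size() const override { return buffer_.size(); }
  bool empty() const override { return buffer_.empty(); }
  void clear() override { buffer_.clear(); }
  const std::byte* data() const override { return buffer_.data(); }

private:
  std::vector<std::byte> buffer_;
};
```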
@@ -263,31 +345,38 @@ class McapWriter final {
   uint64_t chunkSize_ = DefaultChunkSize;
   mcap::IWritable* output_ = nullptr;
   std::unique_ptr<mcap::StreamWriter> streamOutput_;
+  std::unique_ptr<mcap::BufferedWriter> uncompressedChunk_;
+  std::unique_ptr<mcap::LZ4Writer> lz4Chunk_;
+  std::unique_ptr<mcap::ZStdWriter> zstdChunk_;
   std::vector<mcap::ChannelInfo> channels_;
   std::vector<mcap::AttachmentIndex> attachmentIndex_;
   std::vector<mcap::ChunkIndex> chunkIndex_;
   Statistics statistics_{};
-  mcap::BufferedWriter currentChunk_;
   std::unordered_map<uint16_t, mcap::MessageIndex> currentMessageIndex_;
   uint64_t currentChunkStart_ = std::numeric_limits<uint64_t>::max();
   uint64_t currentChunkEnd_ = std::numeric_limits<uint64_t>::min();
+  Compression compression_ = Compression::None;
+  uint64_t uncompressedSize_ = 0;
   bool indexing_ = true;
+  bool opened_ = false;
 
-  void writeChunk(mcap::IWritable& output, const mcap::BufferedWriter& chunkData);
+  mcap::IWritable& getOutput();
+  mcap::IChunkWriter* getChunkWriter();
+  void writeChunk(mcap::IWritable& output, mcap::IChunkWriter& chunkData);
 
   static void writeMagic(mcap::IWritable& output);
 
-  static void write(mcap::IWritable& output, const mcap::Header& header);
-  static void write(mcap::IWritable& output, const mcap::Footer& footer);
-  static void write(mcap::IWritable& output, const mcap::ChannelInfo& info);
-  static void write(mcap::IWritable& output, const mcap::Message& message);
-  static void write(mcap::IWritable& output, const mcap::Attachment& attachment);
-  static void write(mcap::IWritable& output, const mcap::Chunk& chunk);
-  static void write(mcap::IWritable& output, const mcap::MessageIndex& index);
-  static void write(mcap::IWritable& output, const mcap::ChunkIndex& index);
-  static void write(mcap::IWritable& output, const mcap::AttachmentIndex& index);
-  static void write(mcap::IWritable& output, const mcap::Statistics& stats);
-  static void write(mcap::IWritable& output, const mcap::UnknownRecord& record);
+  static uint64_t write(mcap::IWritable& output, const mcap::Header& header);
+  static uint64_t write(mcap::IWritable& output, const mcap::Footer& footer);
+  static uint64_t write(mcap::IWritable& output, const mcap::ChannelInfo& info);
+  static uint64_t write(mcap::IWritable& output, const mcap::Message& message);
+  static uint64_t write(mcap::IWritable& output, const mcap::Attachment& attachment);
+  static uint64_t write(mcap::IWritable& output, const mcap::Chunk& chunk);
+  static uint64_t write(mcap::IWritable& output, const mcap::MessageIndex& index);
+  static uint64_t write(mcap::IWritable& output, const mcap::ChunkIndex& index);
+  static uint64_t write(mcap::IWritable& output, const mcap::AttachmentIndex& index);
+  static uint64_t write(mcap::IWritable& output, const mcap::Statistics& stats);
+  static uint64_t write(mcap::IWritable& output, const mcap::UnknownRecord& record);
 
   static void write(mcap::IWritable& output, const std::string_view str);
   static void write(mcap::IWritable& output, OpCode value);
diff --git a/cpp/mcap/include/mcap/mcap.inl b/cpp/mcap/include/mcap/mcap.inl
index b8a03a7adc..76e321e94c 100644
--- a/cpp/mcap/include/mcap/mcap.inl
+++ b/cpp/mcap/include/mcap/mcap.inl
@@ -3,6 +3,34 @@ static_assert(std::numeric_limits<unsigned char>::digits == 8);
 
 namespace mcap {
 
+// Internal methods ////////////////////////////////////////////////////////////
+
+namespace internal {
+
+uint32_t KeyValueMapSize(const KeyValueMap& map) {
+  uint32_t size = 0;
+  for (const auto& [key, value] : map) {
+    size += 4 + key.size() + 4 + value.size();
+  }
+  return size;
+}
+
+const std::string& CompressionString(Compression compression) {
+  static std::string none = "";
+  static std::string lz4 = "lz4";
+  static std::string zstd =
"zstd"; + switch (compression) { + case Compression::None: + return none; + case Compression::Lz4: + return lz4; + case Compression::Zstd: + return zstd; + } +} + +} // namespace internal + // Public API ////////////////////////////////////////////////////////////////// McapWriter::~McapWriter() { @@ -10,8 +38,21 @@ McapWriter::~McapWriter() { } void McapWriter::open(mcap::IWritable& writer, const McapWriterOptions& options) { + opened_ = true; chunkSize_ = options.noChunking ? 0 : options.chunkSize; indexing_ = !options.noIndexing; + compression_ = chunkSize_ > 0 ? options.compression : Compression::None; + switch (compression_) { + case Compression::None: + uncompressedChunk_ = std::make_unique(); + break; + case Compression::Lz4: + lz4Chunk_ = std::make_unique(options.compressionLevel, chunkSize_); + break; + case Compression::Zstd: + zstdChunk_ = std::make_unique(options.compressionLevel, chunkSize_); + break; + } output_ = &writer; writeMagic(writer); write(writer, Header{options.profile, options.library, options.metadata}); @@ -23,15 +64,15 @@ void McapWriter::open(std::ostream& stream, const McapWriterOptions& options) { } void McapWriter::close() { - if (!output_) { + if (!opened_ || !output_) { return; } - auto& output = *output_; + auto* chunkWriter = getChunkWriter(); + auto& fileOutput = *output_; // Check if there is an open chunk that needs to be closed - if (currentChunk_.size() > 0) { - writeChunk(output, currentChunk_); - currentChunk_.end(); + if (chunkWriter && !chunkWriter->empty()) { + writeChunk(fileOutput, *chunkWriter); } uint64_t indexOffset = 0; @@ -39,36 +80,54 @@ void McapWriter::close() { if (indexing_) { // Get the offset of the End Of File section - indexOffset = output.size(); + indexOffset = fileOutput.size(); // Write all channel info records for (const auto& channel : channels_) { - write(output, channel); + write(fileOutput, channel); } // Write chunk index records for (const auto& chunkIndexRecord : chunkIndex_) { - write(output, chunkIndexRecord); + write(fileOutput, chunkIndexRecord); } // Write attachment index records for (const auto& attachmentIndexRecord : attachmentIndex_) { - write(output, attachmentIndexRecord); + write(fileOutput, attachmentIndexRecord); } // Write the statistics record - write(output, statistics_); + write(fileOutput, statistics_); } // TODO: Calculate the index CRC // Write the footer and trailing magic - write(output, mcap::Footer{indexOffset, indexCrc}); - writeMagic(output); + write(fileOutput, mcap::Footer{indexOffset, indexCrc}); + writeMagic(fileOutput); + + // Flush output + fileOutput.end(); + + terminate(); +} - output.end(); +void McapWriter::terminate() { output_ = nullptr; streamOutput_.reset(); + uncompressedChunk_.reset(); + zstdChunk_.reset(); + + channels_.clear(); + attachmentIndex_.clear(); + chunkIndex_.clear(); + statistics_ = {}; + currentMessageIndex_.clear(); + currentChunkStart_ = std::numeric_limits::max(); + currentChunkEnd_ = std::numeric_limits::min(); + + opened_ = false; } void McapWriter::addChannel(mcap::ChannelInfo& info) { @@ -80,7 +139,7 @@ mcap::Status McapWriter::write(const mcap::Message& message) { if (!output_) { return StatusCode::NotOpen; } - auto& output = chunkSize_ > 0 ? 
currentChunk_ : *output_; + auto& output = getOutput(); auto& channelMessageCounts = statistics_.channelMessageCounts; // Write out channel info if we have not yet done so @@ -90,23 +149,27 @@ mcap::Status McapWriter::write(const mcap::Message& message) { return StatusCode::InvalidChannelId; } - write(output, channels_[index]); + // Write the channel info record + uncompressedSize_ += write(output, channels_[index]); + + // Update channel statistics channelMessageCounts.emplace(message.channelId, 0); ++statistics_.channelCount; } - const uint64_t messageOffset = output.size(); + const uint64_t messageOffset = uncompressedSize_; // Write the message - write(output, message); + uncompressedSize_ += write(output, message); - // Update statistics + // Update message statistics if (indexing_) { ++statistics_.messageCount; channelMessageCounts[message.channelId] += 1; } - if (chunkSize_ > 0) { + auto* chunkWriter = getChunkWriter(); + if (chunkWriter) { if (indexing_) { // Update the message index auto& messageIndex = currentMessageIndex_[message.channelId]; @@ -120,9 +183,9 @@ mcap::Status McapWriter::write(const mcap::Message& message) { } // Check if the current chunk is ready to close - if (currentChunk_.size() >= chunkSize_) { - writeChunk(*output_, currentChunk_); - currentChunk_.end(); + if (uncompressedSize_ >= chunkSize_) { + auto& fileOutput = *output_; + writeChunk(fileOutput, *chunkWriter); } } @@ -133,18 +196,20 @@ mcap::Status McapWriter::write(const mcap::Attachment& attachment) { if (!output_) { return StatusCode::NotOpen; } - auto& output = *output_; + auto& fileOutput = *output_; // Check if we have an open chunk that needs to be closed - if (currentChunk_.size() > 0) { - writeChunk(output, currentChunk_); - currentChunk_.end(); + auto* chunkWriter = getChunkWriter(); + if (chunkWriter && !chunkWriter->empty()) { + writeChunk(fileOutput, *chunkWriter); } - const uint64_t fileOffset = output.size(); + const uint64_t fileOffset = fileOutput.size(); - write(output, attachment); + // Write the attachment + write(fileOutput, attachment); + // Update statistics and attachment index if (indexing_) { ++statistics_.attachmentCount; attachmentIndex_.emplace_back(attachment, fileOffset); @@ -155,28 +220,44 @@ mcap::Status McapWriter::write(const mcap::Attachment& attachment) { // Private methods ///////////////////////////////////////////////////////////// -namespace internal { +mcap::IWritable& McapWriter::getOutput() { + if (chunkSize_ == 0) { + return *output_; + } + switch (compression_) { + case Compression::None: + return *uncompressedChunk_; + case Compression::Lz4: + return *lz4Chunk_; + case Compression::Zstd: + return *zstdChunk_; + } +} -uint32_t KeyValueMapSize(const KeyValueMap& map) { - uint32_t size = 0; - for (const auto& [key, value] : map) { - size += 4 + key.size() + 4 + value.size(); +mcap::IChunkWriter* McapWriter::getChunkWriter() { + switch (compression_) { + case Compression::None: + return uncompressedChunk_.get(); + case Compression::Lz4: + return lz4Chunk_.get(); + case Compression::Zstd: + return zstdChunk_.get(); } - return size; } -} // namespace internal +void McapWriter::writeChunk(mcap::IWritable& output, mcap::IChunkWriter& chunkData) { + const auto& compression = internal::CompressionString(compression_); + + // Flush any in-progress compression stream + chunkData.end(); -void McapWriter::writeChunk(mcap::IWritable& output, const mcap::BufferedWriter& chunkData) { - uint64_t uncompressedSize = chunkData.size(); - uint32_t uncompressedCrc = 0; - 
std::string compression = "";
-  uint64_t recordsSize = uncompressedSize;
-  const std::byte* records = chunkData.data();
+  const uint64_t compressedSize = chunkData.size();
+  const std::byte* data = chunkData.data();
+  const uint32_t uncompressedCrc = 0;
 
   // Write the chunk
   const uint64_t chunkOffset = output.size();
-  write(output, Chunk{uncompressedSize, uncompressedCrc, compression, recordsSize, records});
+  write(output, Chunk{uncompressedSize_, uncompressedCrc, compression, compressedSize, data});
 
   if (indexing_) {
     // Update statistics
@@ -200,21 +281,24 @@ void McapWriter::writeChunk(mcap::IWritable& output, const mcap::BufferedWriter&
     chunkIndexRecord.chunkOffset = chunkOffset;
     chunkIndexRecord.messageIndexLength = messageIndexLength;
     chunkIndexRecord.compression = compression;
-    chunkIndexRecord.compressedSize = recordsSize;
-    chunkIndexRecord.uncompressedSized = uncompressedSize;
+    chunkIndexRecord.compressedSize = compressedSize;
+    chunkIndexRecord.uncompressedSized = uncompressedSize_;
     chunkIndexRecord.crc = 0;
 
-    // Reset start/end times for the next chunk
+    // Reset uncompressedSize and start/end times for the next chunk
+    uncompressedSize_ = 0;
     currentChunkStart_ = std::numeric_limits<uint64_t>::max();
     currentChunkEnd_ = std::numeric_limits<uint64_t>::min();
   }
+
+  chunkData.clear();
 }
 
 void McapWriter::writeMagic(mcap::IWritable& output) {
   write(output, reinterpret_cast<const std::byte*>(Magic), sizeof(Magic));
 }
 
-void McapWriter::write(mcap::IWritable& output, const mcap::Header& header) {
+uint64_t McapWriter::write(mcap::IWritable& output, const mcap::Header& header) {
   const uint32_t metadataSize = internal::KeyValueMapSize(header.metadata);
   const uint64_t recordSize =
     4 + header.profile.size() + 4 + header.library.size() + 4 + metadataSize;
@@ -224,16 +308,22 @@ void McapWriter::write(mcap::IWritable& output, const mcap::Header& header) {
   write(output, header.profile);
   write(output, header.library);
   write(output, header.metadata, metadataSize);
+
+  return 9 + recordSize;
 }
 
-void McapWriter::write(mcap::IWritable& output, const mcap::Footer& footer) {
+uint64_t McapWriter::write(mcap::IWritable& output, const mcap::Footer& footer) {
+  const uint64_t recordSize = 12;
+
   write(output, OpCode::Footer);
-  write(output, uint64_t(12));
+  write(output, recordSize);
   write(output, footer.indexOffset);
   write(output, footer.indexCrc);
+
+  return 9 + recordSize;
 }
 
-void McapWriter::write(mcap::IWritable& output, const mcap::ChannelInfo& info) {
+uint64_t McapWriter::write(mcap::IWritable& output, const mcap::ChannelInfo& info) {
   const uint32_t userDataSize = internal::KeyValueMapSize(info.userData);
   const uint64_t recordSize = 2 + 4 + info.topicName.size() + 4 + info.encoding.size() + 4 +
                               info.schemaName.size() + 4 + info.schema.size() + 4 + userDataSize +
@@ -249,9 +339,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::ChannelInfo& info) {
   write(output, info.schema);
   write(output, info.userData, userDataSize);
   write(output, crc);
+
+  return 9 + recordSize;
 }
 
-void McapWriter::write(mcap::IWritable& output, const mcap::Message& message) {
+uint64_t McapWriter::write(mcap::IWritable& output, const mcap::Message& message) {
   const uint64_t recordSize = 2 + 4 + 8 + 8 + message.dataSize;
 
   write(output, OpCode::Message);
@@ -261,9 +353,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::Message& message) {
   write(output, message.publishTime);
   write(output, message.recordTime);
   write(output, message.data, message.dataSize);
+
+  return 9 + recordSize;
 }
 
-void McapWriter::write(mcap::IWritable& output, const
mcap::Attachment& attachment) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::Attachment& attachment) { const uint64_t recordSize = 4 + attachment.name.size() + 8 + 4 + attachment.contentType.size() + attachment.dataSize; @@ -273,9 +367,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::Attachment& attachme write(output, attachment.recordTime); write(output, attachment.contentType); write(output, attachment.data, attachment.dataSize); + + return 9 + recordSize; } -void McapWriter::write(mcap::IWritable& output, const mcap::Chunk& chunk) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::Chunk& chunk) { const uint64_t recordSize = 8 + 4 + 4 + chunk.compression.size() + chunk.recordsSize; write(output, OpCode::Chunk); @@ -284,9 +380,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::Chunk& chunk) { write(output, chunk.uncompressedCrc); write(output, chunk.compression); write(output, chunk.records, chunk.recordsSize); + + return 9 + recordSize; } -void McapWriter::write(mcap::IWritable& output, const mcap::MessageIndex& index) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::MessageIndex& index) { const uint32_t recordsSize = index.records.size() * 16; const uint64_t recordSize = 2 + 4 + 4 + recordsSize + 4; const uint32_t crc = 0; @@ -303,9 +401,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::MessageIndex& index) } write(output, crc); + + return 9 + recordSize; } -void McapWriter::write(mcap::IWritable& output, const mcap::ChunkIndex& index) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::ChunkIndex& index) { const uint32_t messageIndexOffsetsSize = index.messageIndexOffsets.size() * 10; const uint64_t recordSize = 8 + 8 + 8 + 4 + messageIndexOffsetsSize + 8 + 4 + index.compression.size() + 8 + 8 + 4; @@ -328,9 +428,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::ChunkIndex& index) { write(output, index.compressedSize); write(output, index.uncompressedSized); write(output, crc); + + return 9 + recordSize; } -void McapWriter::write(mcap::IWritable& output, const mcap::AttachmentIndex& index) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::AttachmentIndex& index) { const uint64_t recordSize = 8 + 8 + 4 + index.name.size() + 4 + index.contentType.size() + 8; write(output, OpCode::AttachmentIndex); @@ -340,9 +442,11 @@ void McapWriter::write(mcap::IWritable& output, const mcap::AttachmentIndex& ind write(output, index.name); write(output, index.contentType); write(output, index.offset); + + return 9 + recordSize; } -void McapWriter::write(mcap::IWritable& output, const mcap::Statistics& stats) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::Statistics& stats) { const uint32_t channelMessageCountsSize = stats.channelMessageCounts.size() * 10; const uint64_t recordSize = 8 + 4 + 4 + 4 + 4 + channelMessageCountsSize; @@ -358,12 +462,16 @@ void McapWriter::write(mcap::IWritable& output, const mcap::Statistics& stats) { write(output, channelId); write(output, messageCount); } + + return 9 + recordSize; } -void McapWriter::write(mcap::IWritable& output, const mcap::UnknownRecord& record) { +uint64_t McapWriter::write(mcap::IWritable& output, const mcap::UnknownRecord& record) { write(output, mcap::OpCode(record.opcode)); write(output, record.dataSize); write(output, record.data, record.dataSize); + + return 9 + record.dataSize; } void McapWriter::write(mcap::IWritable& output, const std::string_view str) { @@ -405,11 +513,19 @@ 
void BufferedWriter::write(const std::byte* data, uint64_t size) {
   buffer_.insert(buffer_.end(), data, data + size);
 }
 
+void BufferedWriter::end() {
+  // no-op
+}
+
 uint64_t BufferedWriter::size() const {
   return buffer_.size();
 }
 
-void BufferedWriter::end() {
+bool BufferedWriter::empty() const {
+  return buffer_.empty();
+}
+
+void BufferedWriter::clear() {
   buffer_.clear();
 }
 
@@ -436,4 +552,130 @@ uint64_t StreamWriter::size() const {
   return size_;
 }
 
+// LZ4Writer ///////////////////////////////////////////////////////////////////
+
+namespace internal {
+
+int LZ4AccelerationLevel(CompressionLevel level) {
+  switch (level) {
+    case CompressionLevel::Fastest:
+      return 65537;
+    case CompressionLevel::Fast:
+      return 32768;
+    case CompressionLevel::Default:
+    case CompressionLevel::Slow:
+    case CompressionLevel::Slowest:
+      return 1;
+  }
+}
+
+}  // namespace internal
+
+LZ4Writer::LZ4Writer(CompressionLevel compressionLevel, uint64_t chunkSize) {
+  acceleration_ = internal::LZ4AccelerationLevel(compressionLevel);
+  preEndBuffer_.reserve(chunkSize);
+}
+
+void LZ4Writer::write(const std::byte* data, uint64_t size) {
+  preEndBuffer_.insert(preEndBuffer_.end(), data, data + size);
+}
+
+void LZ4Writer::end() {
+  const auto dstCapacity = LZ4_compressBound(preEndBuffer_.size());
+  buffer_.resize(dstCapacity);
+  const int dstSize = LZ4_compress_fast(reinterpret_cast<const char*>(preEndBuffer_.data()),
+                                        reinterpret_cast<char*>(buffer_.data()),
+                                        preEndBuffer_.size(), dstCapacity, acceleration_);
+  buffer_.resize(dstSize);
+  preEndBuffer_.clear();
+}
+
+uint64_t LZ4Writer::size() const {
+  return buffer_.size();
+}
+
+bool LZ4Writer::empty() const {
+  return buffer_.empty() && preEndBuffer_.empty();
+}
+
+void LZ4Writer::clear() {
+  preEndBuffer_.clear();
+  buffer_.clear();
+}
+
+const std::byte* LZ4Writer::data() const {
+  return buffer_.data();
+}
+
+// ZStdWriter //////////////////////////////////////////////////////////////////
+
+namespace internal {
+
+int ZStdCompressionLevel(CompressionLevel level) {
+  switch (level) {
+    case CompressionLevel::Fastest:
+      return -5;
+    case CompressionLevel::Fast:
+      return -3;
+    case CompressionLevel::Default:
+      return 1;
+    case CompressionLevel::Slow:
+      return 5;
+    case CompressionLevel::Slowest:
+      return 19;
+  }
+}
+
+}  // namespace internal
+
+ZStdWriter::ZStdWriter(CompressionLevel compressionLevel, uint64_t chunkSize) {
+  zstdContext_ = ZSTD_createCCtx();
+  ZSTD_CCtx_setParameter(zstdContext_, ZSTD_c_compressionLevel,
+                         internal::ZStdCompressionLevel(compressionLevel));
+  preEndBuffer_.reserve(chunkSize);
+}
+
+ZStdWriter::~ZStdWriter() {
+  ZSTD_freeCCtx(zstdContext_);
+}
+
+void ZStdWriter::write(const std::byte* data, uint64_t size) {
+  preEndBuffer_.insert(preEndBuffer_.end(), data, data + size);
+}
+
+void ZStdWriter::end() {
+  const auto dstCapacity = ZSTD_compressBound(preEndBuffer_.size());
+  buffer_.resize(dstCapacity);
+  const size_t dstSize = ZSTD_compress2(zstdContext_, buffer_.data(), dstCapacity,
+                                        preEndBuffer_.data(), preEndBuffer_.size());
+  if (ZSTD_isError(dstSize)) {
+    const auto errCode = ZSTD_getErrorCode(dstSize);
+    std::cerr << "ZSTD_compress2 failed: " << ZSTD_getErrorName(dstSize) << " ("
+              << ZSTD_getErrorString(errCode) << ")\n";
+    std::abort();
+  }
+  ZSTD_CCtx_reset(zstdContext_, ZSTD_reset_session_only);
+  buffer_.resize(dstSize);
+  preEndBuffer_.clear();
+}
+
+uint64_t ZStdWriter::size() const {
+  return buffer_.size();
+}
+
+bool ZStdWriter::empty() const {
return buffer_.empty() && preEndBuffer_.empty(); +} + +void ZStdWriter::clear() { + preEndBuffer_.clear(); + buffer_.clear(); +} + +const std::byte* ZStdWriter::data() const { + return buffer_.data(); +} + } // namespace mcap From 597fcd956eb26bba15ebf2055728ceaf330b015e Mon Sep 17 00:00:00 2001 From: Wyatt Alt Date: Wed, 26 Jan 2022 16:30:50 -0800 Subject: [PATCH 033/635] Add MCAP client code in Go (#53) Adds a library (libmcap) and a command line tool (mcap) for demonstrating the file format in Go. --- .gitattributes | 5 + .github/workflows/ci.yml | 13 + go/Makefile | 5 + go/README.md | 7 + go/libmcap/Makefile | 5 + go/libmcap/README.md | 6 + go/libmcap/bag2mcap.go | 231 +++++++ go/libmcap/benchmark_test.go | 37 ++ go/libmcap/counting_writer.go | 54 ++ go/libmcap/go.mod | 15 + go/libmcap/go.sum | 15 + go/libmcap/indexed_message_iterator.go | 322 +++++++++ go/libmcap/lexer.go | 326 ++++++++++ go/libmcap/lexer_test.go | 326 ++++++++++ go/libmcap/mcap.go | 339 ++++++++++ go/libmcap/reader.go | 310 +++++++++ go/libmcap/reader_test.go | 272 ++++++++ go/libmcap/testutils.go | 117 ++++ go/libmcap/unindexed_message_iterator.go | 53 ++ go/libmcap/writer.go | 447 +++++++++++++ go/libmcap/writer_test.go | 152 +++++ go/mcap/README.md | 64 ++ go/mcap/cmd/cat.go | 50 ++ go/mcap/cmd/convert.go | 39 ++ go/mcap/cmd/info.go | 34 + go/mcap/cmd/root.go | 43 ++ go/mcap/go.mod | 35 + go/mcap/go.sum | 789 +++++++++++++++++++++++ go/mcap/main.go | 22 + testdata/bags/demo.bag | 3 + testdata/mcap/demo.mcap | 3 + 31 files changed, 4139 insertions(+) create mode 100644 go/Makefile create mode 100644 go/README.md create mode 100644 go/libmcap/Makefile create mode 100644 go/libmcap/README.md create mode 100644 go/libmcap/bag2mcap.go create mode 100644 go/libmcap/benchmark_test.go create mode 100644 go/libmcap/counting_writer.go create mode 100644 go/libmcap/go.mod create mode 100644 go/libmcap/go.sum create mode 100644 go/libmcap/indexed_message_iterator.go create mode 100644 go/libmcap/lexer.go create mode 100644 go/libmcap/lexer_test.go create mode 100644 go/libmcap/mcap.go create mode 100644 go/libmcap/reader.go create mode 100644 go/libmcap/reader_test.go create mode 100644 go/libmcap/testutils.go create mode 100644 go/libmcap/unindexed_message_iterator.go create mode 100644 go/libmcap/writer.go create mode 100644 go/libmcap/writer_test.go create mode 100644 go/mcap/README.md create mode 100644 go/mcap/cmd/cat.go create mode 100644 go/mcap/cmd/convert.go create mode 100644 go/mcap/cmd/info.go create mode 100644 go/mcap/cmd/root.go create mode 100644 go/mcap/go.mod create mode 100644 go/mcap/go.sum create mode 100644 go/mcap/main.go create mode 100644 testdata/bags/demo.bag create mode 100644 testdata/mcap/demo.mcap diff --git a/.gitattributes b/.gitattributes index 24a8e87939..28f742f657 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,6 @@ *.png filter=lfs diff=lfs merge=lfs -text +testdata/mcap/demo.mcap filter=lfs diff=lfs merge=lfs -text +testdata/ filter=lfs diff=lfs merge=lfs -text +testdata/bags filter=lfs diff=lfs merge=lfs -text +testdata/bags/demo.bag filter=lfs diff=lfs merge=lfs -text +testdata/mcap filter=lfs diff=lfs merge=lfs -text diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5ffac5dd20..79d1aca9c8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,3 +42,16 @@ jobs: - run: yarn workspace @foxglove/mcap lint:ci - run: yarn workspace @foxglove/mcap typecheck - run: yarn workspace @foxglove/mcap test + + go: + runs-on: ubuntu-latest + 
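+    # Every run step in this job executes from the go/ directory (see defaults.run below).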
defaults: + run: + working-directory: go + steps: + - uses: actions/setup-go@v2 + with: + go-version: 1.17.x + - uses: actions/checkout@v2 + - run: git lfs pull + - run: make test diff --git a/go/Makefile b/go/Makefile new file mode 100644 index 0000000000..2f33914864 --- /dev/null +++ b/go/Makefile @@ -0,0 +1,5 @@ +test: + make -C libmcap test + +bench: + make -C libmcap bench diff --git a/go/README.md b/go/README.md new file mode 100644 index 0000000000..3257f11d54 --- /dev/null +++ b/go/README.md @@ -0,0 +1,7 @@ +## MCAP go libraries + +[libmcap]: ./libmcap +[mcap]: ./mcap + +* [Library for parsing mcap files][libmcap] +* [Command line tool for format demonstration][mcap] diff --git a/go/libmcap/Makefile b/go/libmcap/Makefile new file mode 100644 index 0000000000..bbac8cd941 --- /dev/null +++ b/go/libmcap/Makefile @@ -0,0 +1,5 @@ +test: + go test ./... + +bench: + go test -benchmem -run=^$$ -count 5 -bench ^BenchmarkLexer/demo -memprofile mem.out -cpuprofile cpu.out diff --git a/go/libmcap/README.md b/go/libmcap/README.md new file mode 100644 index 0000000000..655464343d --- /dev/null +++ b/go/libmcap/README.md @@ -0,0 +1,6 @@ +## libmcap + +> Note: This library is experimental and will change without warning until +> finalization of the spec. + +An experimental library for writing and reading MCAP files in go. diff --git a/go/libmcap/bag2mcap.go b/go/libmcap/bag2mcap.go new file mode 100644 index 0000000000..98fbc47b41 --- /dev/null +++ b/go/libmcap/bag2mcap.go @@ -0,0 +1,231 @@ +package libmcap + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + "log" + + "github.com/pierrec/lz4/v4" +) + +var ( + BagMagic = []byte("#ROSBAG V2.0\n") +) + +type BagOp byte + +const ( + OpBagHeader = 0x03 + OpBagChunk = 0x05 + OpBagConnection = 0x07 + OpBagMessageData = 0x02 + OpBagIndexData = 0x04 + OpBagChunkInfo = 0x06 +) + +func extractHeaderValue(header []byte, key []byte) ([]byte, error) { + var fieldlen uint32 + offset := 0 + for offset < len(header) { + fieldlen, offset = getUint32(header, offset) + field := header[offset : offset+int(fieldlen)] + parts := bytes.SplitN(field, []byte{'='}, 2) + if len(parts) != 2 { + return nil, fmt.Errorf("invalid header field: %s", field) + } + if bytes.Equal(key, parts[0]) { + return parts[1], nil + } + offset += int(fieldlen) + } + return nil, fmt.Errorf("key %s not found", key) +} + +func processBag( + r io.Reader, + connectionCallback func([]byte, []byte) error, + msgcallback func([]byte, []byte) error, + checkmagic bool, +) error { + if checkmagic { + magic := make([]byte, len(BagMagic)) + _, err := io.ReadFull(r, magic) + if err != nil { + log.Fatal(err) + } + if !bytes.Equal(magic, BagMagic) { + log.Fatal("not a bag") + } + } + + headerbuf := make([]byte, 1024) + buf := make([]byte, 8) + for { + // header len + _, err := io.ReadFull(r, buf[:4]) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + headerlen := binary.LittleEndian.Uint32(buf[:4]) + + // header + if len(headerbuf) < int(headerlen) { + headerbuf = make([]byte, headerlen*2) + } + _, err = io.ReadFull(r, headerbuf[:headerlen]) + if err != nil { + return err + } + + header := headerbuf[:headerlen] + + // data len + _, err = io.ReadFull(r, buf[4:8]) + if err != nil { + return err + } + datalen := binary.LittleEndian.Uint32(buf[4:8]) + + // opcode + opcode, err := extractHeaderValue(header, []byte("op")) + if err != nil { + return err + } + + // data + data := make([]byte, datalen) + _, err = io.ReadFull(r, data) + if err != nil { + return err + } + 
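+		// Dispatch on the opcode byte extracted from the record header above.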
switch opcode[0] { + case OpBagHeader: + continue + case OpBagChunk: + compression, err := extractHeaderValue(header, []byte("compression")) + if err != nil { + return err + } + var reader io.Reader + switch string(compression) { + case "lz4": + reader = lz4.NewReader(bytes.NewReader(data)) + case "none": + reader = bytes.NewReader(data) + default: + return fmt.Errorf("unsupported compression: %s", compression) + } + err = processBag(reader, connectionCallback, msgcallback, false) + if err != nil && !errors.Is(err, io.EOF) { + return err + } + case OpBagConnection: + err := connectionCallback(header, data) + if err != nil { + return err + } + case OpBagMessageData: + err := msgcallback(header, data) + if err != nil { + return err + } + case OpBagIndexData: + continue + case OpBagChunkInfo: + continue + } + } + return nil +} + +func Bag2MCAP(r io.Reader, w io.Writer) error { + writer, err := NewWriter(w, &WriterOptions{ + Chunked: true, + ChunkSize: 4 * 1024 * 1024, + Compression: CompressionLZ4, + IncludeCRC: true, + }) + if err != nil { + return err + } + defer writer.Close() + + err = writer.WriteHeader("ros1", "golang-bag2mcap", map[string]string{"name": "my funky mcap file"}) + if err != nil { + return err + } + seq := uint32(0) + return processBag(r, + func(header, data []byte) error { + conn, err := extractHeaderValue(header, []byte("conn")) + if err != nil { + return err + } + connID := binary.LittleEndian.Uint16(conn) + topic, err := extractHeaderValue(header, []byte("topic")) + if err != nil { + return err + } + typ, err := extractHeaderValue(data, []byte("type")) + if err != nil { + return err + } + md5sum, err := extractHeaderValue(data, []byte("md5sum")) + if err != nil { + return err + } + msgdef, err := extractHeaderValue(data, []byte("message_definition")) + if err != nil { + return err + } + channelInfo := &ChannelInfo{ + ChannelID: connID, + TopicName: string(topic), + Encoding: "ros1", + SchemaName: string(typ), + Schema: msgdef, + UserData: map[string]string{ + "md5sum": string(md5sum), + }, + } + return writer.WriteChannelInfo(channelInfo) + }, + func(header, data []byte) error { + conn, err := extractHeaderValue(header, []byte("conn")) + if err != nil { + return err + } + connID := binary.LittleEndian.Uint16(conn) + time, err := extractHeaderValue(header, []byte("time")) + if err != nil { + return err + } + nsecs := rostimeToNanos(time) + err = writer.WriteMessage(&Message{ + ChannelID: connID, + Sequence: seq, + RecordTime: nsecs, + PublishTime: nsecs, + Data: data, + }) + if err != nil { + return err + } + seq++ + return nil + }, + true, + ) +} + +func rostimeToNanos(time []byte) uint64 { + secs := binary.LittleEndian.Uint32(time) + nsecs := binary.LittleEndian.Uint32(time[4:]) + return uint64(secs)*1000000000 + uint64(nsecs) +} diff --git a/go/libmcap/benchmark_test.go b/go/libmcap/benchmark_test.go new file mode 100644 index 0000000000..5f6692a4e6 --- /dev/null +++ b/go/libmcap/benchmark_test.go @@ -0,0 +1,37 @@ +package libmcap + +import ( + "bytes" + "errors" + "io" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func BenchmarkMessageIteration(b *testing.B) { + b.Run("indexed - memory", func(b *testing.B) { + bagfile, err := os.Open("testdata/demo.bag") + assert.Nil(b, err) + defer bagfile.Close() + + mcapfile := &bytes.Buffer{} + err = Bag2MCAP(bagfile, mcapfile) + assert.Nil(b, err) + r := NewReader(bytes.NewReader(mcapfile.Bytes())) + it, err := r.Messages(0, time.Now().UnixNano(), []string{}, true) + assert.Nil(b, err) + c := 0 + 
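+		// Drain the iterator to io.EOF, counting messages; demo.bag is expected to contain 1606.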
for { + _, _, err := it.Next() + if errors.Is(err, io.EOF) { + break + } + assert.Nil(b, err) + c++ + } + assert.Equal(b, 1606, c) + }) +} diff --git a/go/libmcap/counting_writer.go b/go/libmcap/counting_writer.go new file mode 100644 index 0000000000..fa4e9b1330 --- /dev/null +++ b/go/libmcap/counting_writer.go @@ -0,0 +1,54 @@ +package libmcap + +import ( + "hash" + "hash/crc32" + "io" +) + +type CountingCRCWriter struct { + w ResettableWriteCloser + size int64 + crc hash.Hash32 + computeCRC bool +} + +func (c *CountingCRCWriter) Reset(w io.Writer) { + c.w.Reset(w) +} + +func (c *CountingCRCWriter) ResetCRC() { + c.crc = crc32.NewIEEE() +} + +func (c *CountingCRCWriter) ResetSize() { + c.size = 0 +} + +func (c *CountingCRCWriter) CRC() uint32 { + return c.crc.Sum32() +} + +func (c *CountingCRCWriter) Size() int64 { + return c.size +} + +func (c *CountingCRCWriter) Close() error { + return c.w.Close() +} + +func (c *CountingCRCWriter) Write(p []byte) (int, error) { + c.size += int64(len(p)) + if c.computeCRC { + _, _ = c.crc.Write(p) + } + return c.w.Write(p) +} + +func NewCountingCRCWriter(w ResettableWriteCloser, computeCRC bool) *CountingCRCWriter { + return &CountingCRCWriter{ + w: w, + crc: crc32.NewIEEE(), + computeCRC: computeCRC, + } +} diff --git a/go/libmcap/go.mod b/go/libmcap/go.mod new file mode 100644 index 0000000000..f8277ffa9c --- /dev/null +++ b/go/libmcap/go.mod @@ -0,0 +1,15 @@ +module github.com/foxglove/mcap/go/libmcap + +go 1.17 + +require ( + github.com/klauspost/compress v1.14.1 + github.com/pierrec/lz4/v4 v4.1.12 + github.com/stretchr/testify v1.7.0 +) + +require ( + github.com/davecgh/go-spew v1.1.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect +) diff --git a/go/libmcap/go.sum b/go/libmcap/go.sum new file mode 100644 index 0000000000..9620dd0980 --- /dev/null +++ b/go/libmcap/go.sum @@ -0,0 +1,15 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/klauspost/compress v1.14.1 h1:hLQYb23E8/fO+1u53d02A97a8UnsddcvYzq4ERRU4ds= +github.com/klauspost/compress v1.14.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/pierrec/lz4/v4 v4.1.12 h1:44l88ehTZAUGW4VlO1QC4zkilL99M6Y9MXNwEs0uzP8= +github.com/pierrec/lz4/v4 v4.1.12/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/go/libmcap/indexed_message_iterator.go b/go/libmcap/indexed_message_iterator.go new file mode 100644 index 0000000000..bad3cca231 --- /dev/null +++ b/go/libmcap/indexed_message_iterator.go @@ -0,0 +1,322 @@ 
+package libmcap + +import ( + "bytes" + "fmt" + "io" + "sort" + + "github.com/klauspost/compress/zstd" + "github.com/pierrec/lz4/v4" +) + +type messageOffset struct { + chunkIndex int + chunkOffset int + timestamp uint64 +} + +// indexedMessageIterator is an iterator over an indexed mcap read seeker (as +// seeking is required). It makes reads in alternation from the index data +// section, the message index at the end of a chunk, and the chunk's contents. +type indexedMessageIterator struct { + lexer *lexer + rs io.ReadSeeker + topics map[string]bool + start uint64 + end uint64 + + channels map[uint16]*ChannelInfo + statistics *Statistics + chunksets [][]*ChunkIndex + chunkIndexes []*ChunkIndex + attachmentIndexes []*AttachmentIndex + + // current location in the index + activeChunksetIndex int // active chunkset + activeChunkIndex int // index of the active chunk within the set + activeChunkReader *bytes.Reader // active decompressed chunk + activeChunkLexer *lexer + messageOffsets []messageOffset + messageOffsetIdx int + buf []byte // opcode + len +} + +// parseIndexSection parses the index section of the file and populates the +// related fields of the structure. It must be called prior to any of the other +// access methods. +func (it *indexedMessageIterator) parseIndexSection() error { + _, err := it.rs.Seek(-8-4-8, io.SeekEnd) + if err != nil { + return err + } + buf := make([]byte, 8+4+8) + _, err = io.ReadFull(it.rs, buf) + if err != nil { + return fmt.Errorf("read error: %s", err) + } + indexOffset, offset := getUint64(buf, 0) + _, offset = getUint32(buf, offset) // crc + magic := buf[offset:] + if !bytes.Equal(magic, Magic) { + return fmt.Errorf("not an mcap file") + } + err = it.seekFile(int64(indexOffset)) + if err != nil { + return err + } + it.lexer.SetLexNext() + var msg []byte + defer func() { + it.chunksets = sortOverlappingChunks(it.chunkIndexes) + }() + + // now we're in the index data section. Read through the statistics record, + // populating the index fields. 
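+	// The Statistics record is read last; the loop below breaks out once it is seen.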
+Top: + for { + tok := it.lexer.Next() + msg = tok.bytes() + switch tok.TokenType { + case TokenChunkIndex: + chunkIndex, err := parseChunkIndex(msg) + if err != nil { + return fmt.Errorf("failed to parse chunk index: %w", err) + } + // if the chunk index overlaps with the requested parameters, append to the list + if chunkIndex.StartTime < it.end && chunkIndex.EndTime >= it.start { + // if the message index relates to any target channels, include it + for k, _ := range it.channels { + if chunkIndex.MessageIndexOffsets[k] != 0 { + it.chunkIndexes = append(it.chunkIndexes, chunkIndex) + break + } + } + } + case TokenAttachmentIndex: + attachmentIndex, err := parseAttachmentIndex(msg) + if err != nil { + return fmt.Errorf("failed to parse attachment index: %w", err) + } + it.attachmentIndexes = append(it.attachmentIndexes, attachmentIndex) + case TokenChannelInfo: + // if the channel info is one of those requested, add it to our list + channelInfo, err := parseChannelInfo(msg) + if err != nil { + return fmt.Errorf("failed to parse channel info: %w", err) + } + if len(it.topics) == 0 || it.topics[channelInfo.TopicName] { + it.channels[channelInfo.ChannelID] = channelInfo + } + case TokenEOF: + return io.EOF + case TokenStatistics: + stats := parseStatisticsRecord(msg) + it.statistics = stats + break Top + default: + return fmt.Errorf("unexpected token %s in index data section", tok) + } + } + return nil +} + +func sortOverlappingChunks(chunkIndexes []*ChunkIndex) [][]*ChunkIndex { + output := [][]*ChunkIndex{} + chunkset := []*ChunkIndex{} + sort.Slice(chunkIndexes, func(i, j int) bool { + return chunkIndexes[i].StartTime < chunkIndexes[j].StartTime + }) + + var maxend, minstart uint64 + for _, chunkIndex := range chunkIndexes { + if len(chunkset) == 0 { + chunkset = append(chunkset, chunkIndex) + maxend = chunkIndex.EndTime + minstart = chunkIndex.StartTime + continue + } + + // if this chunk index overlaps with the chunkset in hand, add it + if chunkIndex.EndTime >= minstart && chunkIndex.StartTime < maxend { + chunkset = append(chunkset, chunkIndex) + if minstart > chunkIndex.StartTime { + minstart = chunkIndex.StartTime + } + if maxend < chunkIndex.EndTime { + maxend = chunkIndex.EndTime + } + continue + } + + // else the chunk in hand is not overlapping, so close the chunkset and + // initialize a new one + output = append(output, chunkset) + chunkset = []*ChunkIndex{chunkIndex} + maxend = chunkIndex.EndTime + minstart = chunkIndex.StartTime + } + + if len(chunkset) > 0 { + output = append(output, chunkset) + } + + return output +} + +func (it *indexedMessageIterator) loadChunk(index int) error { + chunkset := it.chunksets[it.activeChunksetIndex] + chunkIndex := chunkset[index] + err := it.seekFile(int64(chunkIndex.ChunkOffset)) + if err != nil { + return err + } + tok := it.lexer.Next() + var chunk *Chunk + switch tok.TokenType { + case TokenChunk: + chunk, err = parseChunk(tok.bytes()) + if err != nil { + return fmt.Errorf("failed to parse chunk: %w", err) + } + default: + _ = tok.bytes() + return fmt.Errorf("unexpected token %s in chunk section", tok) + } + switch CompressionFormat(chunk.Compression) { + case CompressionNone: + it.activeChunkReader = bytes.NewReader(chunk.Records) + case CompressionZSTD: + buf := make([]byte, chunk.UncompressedSize) + reader, err := zstd.NewReader(bytes.NewReader(chunk.Records)) + if err != nil { + return err + } + _, err = io.ReadFull(reader, buf) + if err != nil { + return err + } + it.activeChunkReader = bytes.NewReader(buf) + case CompressionLZ4: 
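+		// Decompress the whole chunk up front; message index offsets point into this buffer.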
+ buf := make([]byte, chunk.UncompressedSize) + reader := lz4.NewReader(bytes.NewReader(chunk.Records)) + _, err = io.ReadFull(reader, buf) + if err != nil { + return err + } + it.activeChunkReader = bytes.NewReader(buf) + default: + return fmt.Errorf("unsupported compression format %s", chunk.Compression) + } + + it.activeChunkIndex = index + it.activeChunkLexer = NewLexer(it.activeChunkReader) + it.activeChunkLexer.SetLexNext() + return nil +} + +func (it *indexedMessageIterator) loadNextChunkset() error { + it.activeChunksetIndex++ + it.messageOffsets = it.messageOffsets[:0] + chunkset := it.chunksets[it.activeChunksetIndex] + for i, chunkIndex := range chunkset { + for channelID, offset := range chunkIndex.MessageIndexOffsets { + if _, ok := it.channels[channelID]; !ok { + continue + } + err := it.seekFile(int64(offset)) + if err != nil { + return err + } + // now we're at the message index implicated by the chunk; parse one record + var messageIndex *MessageIndex + tok := it.lexer.Next() + switch tok.TokenType { + case TokenMessageIndex: + messageIndex = parseMessageIndex(tok.bytes()) + default: + _ = tok.bytes() + return fmt.Errorf("unexpected token %s in message index section", tok) + } + for _, record := range messageIndex.Records { + if record.Timestamp >= it.start && record.Timestamp < it.end { + it.messageOffsets = append(it.messageOffsets, messageOffset{ + chunkIndex: i, + chunkOffset: int(record.Offset), + timestamp: record.Timestamp, + }) + } + } + } + } + sort.Slice(it.messageOffsets, func(i, j int) bool { + return it.messageOffsets[i].timestamp < it.messageOffsets[j].timestamp + }) + it.messageOffsetIdx = 0 + return it.loadChunk(0) +} + +func (it *indexedMessageIterator) seekFile(offset int64) error { + _, err := it.rs.Seek(offset, io.SeekStart) + if err != nil { + return err + } + return nil +} + +func (it *indexedMessageIterator) seekChunk(offset int64) error { + _, err := it.activeChunkReader.Seek(offset, io.SeekStart) + if err != nil { + return err + } + return nil +} + +func (it *indexedMessageIterator) Next() (*ChannelInfo, *Message, error) { + if it.statistics == nil { + err := it.parseIndexSection() + if err != nil { + return nil, nil, err + } + } + if it.messageOffsetIdx >= len(it.messageOffsets) { + if it.activeChunksetIndex >= len(it.chunksets)-1 { + return nil, nil, io.EOF + } + err := it.loadNextChunkset() + if err != nil { + return nil, nil, err + } + } + + messageOffset := it.messageOffsets[it.messageOffsetIdx] + it.messageOffsetIdx++ + + // if this message is on a different chunk within the chunkset, we need to + // switch to that chunk + if messageOffset.chunkIndex != it.activeChunkIndex { + err := it.loadChunk(messageOffset.chunkIndex) + if err != nil { + return nil, nil, err + } + } + + // now the active chunk matches the one for this message + err := it.seekChunk(int64(messageOffset.chunkOffset)) + if err != nil { + return nil, nil, err + } + tok := it.activeChunkLexer.Next() + switch tok.TokenType { + case TokenMessage: + msg := parseMessage(tok.bytes()) + return it.channels[msg.ChannelID], msg, nil + case TokenError: + return nil, nil, fmt.Errorf("error: %s", tok.bytes()) + case TokenEOF: // end of chunk + return nil, nil, io.EOF + default: + _ = tok.bytes() + return nil, nil, fmt.Errorf("unexpected token %s in message section", tok) + } +} diff --git a/go/libmcap/lexer.go b/go/libmcap/lexer.go new file mode 100644 index 0000000000..816e95b62a --- /dev/null +++ b/go/libmcap/lexer.go @@ -0,0 +1,326 @@ +package libmcap + +import ( + "bytes" + 
"encoding/binary" + "errors" + "fmt" + "hash/crc32" + "io" + + "github.com/klauspost/compress/zstd" + "github.com/pierrec/lz4/v4" +) + +var ( + ErrNestedChunk = errors.New("detected nested chunk") + ErrBadMagic = errors.New("not an mcap file") +) + +type TokenType int + +func (t TokenType) String() string { + switch t { + case TokenMessage: + return "message" + case TokenChannelInfo: + return "channel info" + case TokenFooter: + return "footer" + case TokenHeader: + return "header" + case TokenAttachment: + return "attachment" + case TokenAttachmentIndex: + return "attachment index" + case TokenChunk: + return "chunk" + case TokenChunkIndex: + return "chunk index" + case TokenStatistics: + return "statistics" + case TokenMessageIndex: + return "message index" + case TokenError: + return "error" + case TokenEOF: + return "eof" + } + return "unknown" +} + +func (t Token) String() string { + switch t.TokenType { + case TokenError: + return fmt.Sprintf("error: %s", string(t.bytes())) + default: + return t.TokenType.String() + } +} + +const ( + TokenMessage TokenType = iota + TokenChannelInfo + TokenFooter + TokenHeader + TokenAttachment + TokenAttachmentIndex + TokenChunkIndex + TokenStatistics + TokenChunk + TokenMessageIndex + TokenEOF + TokenError +) + +type Token struct { + TokenType TokenType + ByteCount int64 + Reader io.Reader +} + +func (t Token) bytes() []byte { + data := make([]byte, t.ByteCount) + _, _ = io.ReadFull(t.Reader, data) // TODO + return data +} + +type stateFn func(*lexer) stateFn + +type decoders struct { + lz4 *lz4.Reader + zstd *zstd.Decoder + none *bytes.Reader +} + +type lexer struct { + state stateFn + basereader io.Reader + chunkreader io.Reader + reader io.Reader + chunkReader ResettableWriteCloser + tokens chan Token + emitChunks bool + + compressedChunk []byte + chunk []byte + skipbuf []byte + decoders decoders + inChunk bool + buf []byte + validateCRC bool +} + +func (l *lexer) SetLexNext() { + l.state = lexNext +} + +func (l *lexer) Next() Token { + if l.state == nil { + return Token{TokenEOF, 0, bytes.NewReader(nil)} + } + for { + select { + case token := <-l.tokens: + return token + default: + l.state = l.state(l) + } + } +} + +func (l *lexer) emit(t TokenType, n int64, data io.Reader) { + l.tokens <- Token{t, n, data} +} + +func (l *lexer) error(err error) stateFn { + if errors.Is(err, io.EOF) || errors.Is(err, io.ErrUnexpectedEOF) { + l.emit(TokenEOF, 0, bytes.NewReader(nil)) + } else { + l.emit(TokenError, int64(len(err.Error())), bytes.NewReader([]byte(err.Error()))) + } + return nil +} + +func lexMagic(l *lexer) stateFn { + magic := make([]byte, len(Magic)) + _, err := l.reader.Read(magic) + if err != nil { + return l.error(err) + } + if !bytes.Equal(magic, Magic) { + return l.error(ErrBadMagic) + } + return lexNext +} + +func (l *lexer) setNoneDecoder(buf []byte) { + if l.decoders.none == nil { + l.decoders.none = bytes.NewReader(buf) + } else { + l.decoders.none.Reset(buf) + } + l.reader = l.decoders.none +} + +func (l *lexer) setLZ4Decoder(r io.Reader) { + if l.decoders.lz4 == nil { + l.decoders.lz4 = lz4.NewReader(r) + } else { + l.decoders.lz4.Reset(r) + } + l.reader = l.decoders.lz4 +} + +func (l *lexer) setZSTDDecoder(r io.Reader) error { + if l.decoders.zstd == nil { + decoder, err := zstd.NewReader(r) + if err != nil { + return err + } + l.decoders.zstd = decoder + } else { + err := l.decoders.zstd.Reset(r) + if err != nil { + return err + } + } + l.reader = l.decoders.zstd + return nil +} + +func skip(l *lexer, n uint64) stateFn { + if n > 
uint64(len(l.skipbuf)) {
+		l.skipbuf = make([]byte, 2*n)
+	}
+	_, err := io.ReadFull(l.reader, l.skipbuf[:n])
+	if err != nil {
+		return l.error(err)
+	}
+	return lexNext
+}
+
+func lexChunk(l *lexer, recordSize uint64) stateFn {
+	if l.inChunk {
+		return l.error(ErrNestedChunk)
+	}
+	_, err := io.ReadFull(l.reader, l.buf[:8+4+4])
+	if err != nil {
+		return l.error(err)
+	}
+	// Skip the uncompressed size; the lexer will read messages out of the
+	// reader incrementally.
+	_ = binary.LittleEndian.Uint64(l.buf[:8])
+	uncompressedCRC := binary.LittleEndian.Uint32(l.buf[8:12])
+	compressionLen := binary.LittleEndian.Uint32(l.buf[12:16])
+	_, err = io.ReadFull(l.reader, l.buf[:compressionLen])
+	if err != nil {
+		return l.error(err)
+	}
+	compression := l.buf[:compressionLen]
+	// will eof at the end of the chunk
+	lr := io.LimitReader(l.reader, int64(recordSize-16-uint64(compressionLen)))
+	switch CompressionFormat(compression) {
+	case CompressionNone:
+		l.reader = lr
+	case CompressionLZ4:
+		l.setLZ4Decoder(lr)
+	case CompressionZSTD:
+		err = l.setZSTDDecoder(lr)
+		if err != nil {
+			return l.error(err)
+		}
+	default:
+		return l.error(fmt.Errorf("unsupported compression: %s", string(compression)))
+	}
+
+	// if we are validating the CRC, we need to fully decompress the chunk right
+	// here, then rewrap the decompressed data in a compatible reader after
+	// validation. If we are not validating CRCs, we can use incremental
+	// decompression for the chunk's data, which may be beneficial to streaming
+	// readers.
+	if l.validateCRC {
+		uncompressed, err := io.ReadAll(l.reader)
+		if err != nil {
+			return l.error(err)
+		}
+		crc := crc32.ChecksumIEEE(uncompressed)
+		if crc != uncompressedCRC {
+			return l.error(fmt.Errorf("invalid CRC: %x != %x", crc, uncompressedCRC))
+		}
+		l.setNoneDecoder(uncompressed)
+	}
+	l.inChunk = true
+	return lexNext
+}
+
+func lexNext(l *lexer) stateFn {
+	_, err := io.ReadFull(l.reader, l.buf[:9])
+	if err != nil {
+		if l.inChunk && (errors.Is(err, io.ErrUnexpectedEOF) || errors.Is(err, io.EOF)) { // todo what's going on here
+			l.inChunk = false
+			l.reader = l.basereader
+			return lexNext
+		}
+		return l.error(err)
+	}
+	opcode := OpCode(l.buf[0])
+	recordLen := binary.LittleEndian.Uint64(l.buf[1:9])
+	switch opcode {
+	case OpHeader:
+		l.emit(TokenHeader, int64(recordLen), l.reader)
+	case OpChannelInfo:
+		l.emit(TokenChannelInfo, int64(recordLen), l.reader)
+	case OpFooter:
+		l.emit(TokenFooter, int64(recordLen), l.reader)
+		return lexMagic
+	case OpMessage:
+		l.emit(TokenMessage, int64(recordLen), l.reader)
+	case OpAttachment:
+		l.emit(TokenAttachment, int64(recordLen), l.reader)
+	case OpAttachmentIndex:
+		l.emit(TokenAttachmentIndex, int64(recordLen), l.reader)
+	case OpChunkIndex:
+		if !l.emitChunks {
+			return skip(l, recordLen)
+		}
+		l.emit(TokenChunkIndex, int64(recordLen), l.reader)
+	case OpStatistics:
+		l.emit(TokenStatistics, int64(recordLen), l.reader)
+	case OpMessageIndex:
+		if !l.emitChunks {
+			return skip(l, recordLen)
+		}
+		l.emit(TokenMessageIndex, int64(recordLen), l.reader)
+	case OpChunk:
+		if !l.emitChunks {
+			return lexChunk(l, recordLen)
+		}
+		l.emit(TokenChunk, int64(recordLen), l.reader)
+	default:
+		return skip(l, recordLen)
+	}
+	return lexNext
+}
+
+type lexOpts struct {
+	validateCRC bool
+	emitChunks  bool
+}
+
+func NewLexer(r io.Reader, opts ...*lexOpts) *lexer {
+	var validateCRC, emitChunks bool
+	if len(opts) > 0 {
+		validateCRC = opts[0].validateCRC
+		emitChunks = opts[0].emitChunks
+	}
+	return &lexer{
+		basereader: r,
+		reader:     r,
+		tokens:     make(chan
Token, 1), // why + buf: make([]byte, 32), + state: lexMagic, + validateCRC: validateCRC, + emitChunks: emitChunks, + } +} diff --git a/go/libmcap/lexer_test.go b/go/libmcap/lexer_test.go new file mode 100644 index 0000000000..fd681714f1 --- /dev/null +++ b/go/libmcap/lexer_test.go @@ -0,0 +1,326 @@ +package libmcap + +import ( + "bytes" + "fmt" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestLexUnchunkedFile(t *testing.T) { + file := file( + header(), + channelInfo(), + message(), + message(), + attachment(), + attachment(), + footer(), + ) + lexer := NewLexer(bytes.NewReader(file)) + expected := []TokenType{ + TokenHeader, + TokenChannelInfo, + TokenMessage, + TokenMessage, + TokenAttachment, + TokenAttachment, + TokenFooter, + } + for _, tt := range expected { + tk := lexer.Next() + assert.Equal(t, tt, tk.TokenType) + } +} + +func TestRejectsUnsupportedCompression(t *testing.T) { + file := file( + chunk(t, CompressionFormat("unknown"), chunk(t, CompressionLZ4, channelInfo(), message(), message())), + ) + lexer := NewLexer(bytes.NewReader(file)) + token := lexer.Next() + assert.Equal(t, TokenError, token.TokenType) + assert.Equal(t, "unsupported compression: unknown", string(token.bytes())) +} + +func TestRejectsNestedChunks(t *testing.T) { + file := file( + header(), + chunk(t, CompressionLZ4, chunk(t, CompressionLZ4, channelInfo(), message(), message())), + footer(), + ) + lexer := NewLexer(bytes.NewReader(file)) + expected := []TokenType{ + TokenHeader, + TokenError, + } + var tk Token + for _, tt := range expected { + tk = lexer.Next() + assert.Equal(t, tt, tk.TokenType) + } + assert.Equal(t, ErrNestedChunk.Error(), string(tk.bytes())) +} + +func TestBadMagic(t *testing.T) { + cases := []struct { + assertion string + magic []byte + }{ + { + "short magic", + make([]byte, 4), + }, + { + "invalid magic", + make([]byte, 20), + }, + } + for _, c := range cases { + t.Run(c.assertion, func(t *testing.T) { + lexer := NewLexer(bytes.NewReader(c.magic)) + tk := lexer.Next() + assert.Equal(t, TokenError, tk.TokenType) + assert.Equal(t, ErrBadMagic.Error(), string(tk.bytes())) + }) + } +} + +func TestShortMagicResultsCorrectError(t *testing.T) { + lexer := NewLexer(bytes.NewReader(make([]byte, 4))) + tk := lexer.Next() + assert.Equal(t, TokenError, tk.TokenType) + assert.Equal(t, ErrBadMagic.Error(), string(tk.bytes())) +} + +func TestReturnsEOFOnSuccessiveCalls(t *testing.T) { + lexer := NewLexer(bytes.NewReader(file())) + tk := lexer.Next() + assert.Equal(t, TokenEOF, tk.TokenType) + tk = lexer.Next() + assert.Equal(t, TokenEOF, tk.TokenType) +} + +func TestLexChunkedFile(t *testing.T) { + for _, validateCRC := range []bool{true, false} { + t.Run(fmt.Sprintf("crc validation %v", validateCRC), func(t *testing.T) { + for _, compression := range []CompressionFormat{ + CompressionLZ4, + CompressionZSTD, + CompressionNone, + } { + t.Run(fmt.Sprintf("chunked %s", compression), func(t *testing.T) { + file := file( + header(), + chunk(t, compression, channelInfo(), message(), message()), + chunk(t, compression, channelInfo(), message(), message()), + attachment(), attachment(), + footer(), + ) + lexer := NewLexer(bytes.NewReader(file), &lexOpts{ + validateCRC: validateCRC, + }) + expected := []TokenType{ + TokenHeader, + TokenChannelInfo, + TokenMessage, + TokenMessage, + TokenChannelInfo, + TokenMessage, + TokenMessage, + TokenAttachment, + TokenAttachment, + TokenFooter, + TokenEOF, + } + for i, tt := range expected { + tk := lexer.Next() + assert.Equal(t, tt, 
tk.TokenType, fmt.Sprintf("mismatch element %d", i)) + } + }) + } + }) + } +} + +func TestSkipsUnknownOpcodes(t *testing.T) { + unrecognized := make([]byte, 9) + unrecognized[0] = 0x99 // zero-length unknown record + file := file( + header(), + unrecognized, + message(), + ) + lexer := NewLexer(bytes.NewReader(file)) + expected := []TokenType{TokenHeader, TokenMessage} + for i, tt := range expected { + tk := lexer.Next() + _ = tk.bytes() + assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) + } +} + +func TestChunkCRCValidation(t *testing.T) { + t.Run("validates valid file", func(t *testing.T) { + file := file( + header(), + chunk(t, CompressionLZ4, channelInfo(), message(), message()), + chunk(t, CompressionLZ4, channelInfo(), message(), message()), + attachment(), attachment(), + footer(), + ) + lexer := NewLexer(bytes.NewReader(file), &lexOpts{ + validateCRC: true, + }) + expected := []TokenType{ + TokenHeader, + TokenChannelInfo, + TokenMessage, + TokenMessage, + TokenChannelInfo, + TokenMessage, + TokenMessage, + TokenAttachment, + TokenAttachment, + TokenFooter, + TokenEOF, + } + for i, tt := range expected { + tk := lexer.Next() + _ = tk.bytes() // always must consume the reader + assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) + } + }) + t.Run("validation fails on corrupted file", func(t *testing.T) { + badchunk := chunk(t, CompressionLZ4, channelInfo(), message(), message()) + badchunk[20] = 0x00 // corrupt the CRC + file := file( + header(), + chunk(t, CompressionLZ4, channelInfo(), message(), message()), + badchunk, + attachment(), attachment(), + footer(), + ) + lexer := NewLexer(bytes.NewReader(file), &lexOpts{ + validateCRC: true, + }) + expected := []TokenType{ + TokenHeader, + TokenChannelInfo, + TokenMessage, + TokenMessage, + TokenError, + } + for i, tt := range expected { + tk := lexer.Next() + data := tk.bytes() // always must consume the reader + if tt == TokenError { + assert.Equal(t, "invalid CRC: ffaaf97a != aaf97a", string(data)) + } + assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) + } + }) +} + +func TestChunkEmission(t *testing.T) { + for _, validateCRC := range []bool{ + true, + false, + } { + t.Run(fmt.Sprintf("crc validation %v", validateCRC), func(t *testing.T) { + for _, compression := range []CompressionFormat{ + CompressionLZ4, + CompressionZSTD, + CompressionNone, + } { + t.Run(fmt.Sprintf("chunked %s", compression), func(t *testing.T) { + file := file( + header(), + chunk(t, compression, channelInfo(), message(), message()), + chunk(t, compression, channelInfo(), message(), message()), + attachment(), attachment(), + footer(), + ) + lexer := NewLexer(bytes.NewReader(file), &lexOpts{ + validateCRC: validateCRC, + emitChunks: true, + }) + expected := []TokenType{ + TokenHeader, + TokenChunk, + TokenChunk, + TokenAttachment, + TokenAttachment, + TokenFooter, + TokenEOF, + } + for i, tt := range expected { + tk := lexer.Next() + _ = tk.bytes() // always must consume the reader + assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) + } + }) + } + }) + } +} + +func BenchmarkLexer(b *testing.B) { + cases := []struct { + assertion string + inputfile string + }{ + { + "demo.bag", + "../../testdata/mcap/demo.mcap", + }, + //{ + // "cal_loop.bag", + // "../../testdata/cal_loop.mcap", + //}, + //{ + // "turtlebot.bag", + // "../../testdata/turtlebot3-burger-2021-04-22-15-35-44.mcap", + //}, + } + for _, c := range cases { + input, err := os.ReadFile(c.inputfile) + 
assert.Nil(b, err) + reader := &bytes.Reader{} + b.ResetTimer() + + msg := make([]byte, 1024*1024) + b.Run(c.assertion, func(b *testing.B) { + for n := 0; n < b.N; n++ { + t0 := time.Now() + var tokens, bytecount int64 + reader.Reset(input) + lexer := NewLexer(reader) + for { + tok := lexer.Next() + if tok.TokenType == TokenEOF { + break + } + + if int64(len(msg)) < tok.ByteCount { + msg = make([]byte, tok.ByteCount) + } + n, err := tok.Reader.Read(msg[:tok.ByteCount]) + if err != nil { + b.Errorf("parse fail: %s", err) + } + tokens++ + bytecount += int64(n) + } + elapsed := time.Since(t0) + mbread := bytecount / (1024 * 1024) + b.ReportMetric(float64(mbread)/elapsed.Seconds(), "MB/sec") + b.ReportMetric(float64(tokens)/elapsed.Seconds(), "tokens/sec") + b.ReportMetric(float64(elapsed.Nanoseconds())/float64(tokens), "ns/token") + } + }) + } +} diff --git a/go/libmcap/mcap.go b/go/libmcap/mcap.go new file mode 100644 index 0000000000..a96ab73ec7 --- /dev/null +++ b/go/libmcap/mcap.go @@ -0,0 +1,339 @@ +package libmcap + +import ( + "bytes" + "encoding/binary" + "fmt" + "hash" + "hash/crc32" + "io" + "math" + "sort" + "time" +) + +var ( + Magic = []byte{0x89, 'M', 'C', 'A', 'P', 0x30, '\r', '\n'} +) + +const ( + CompressionLZ4 CompressionFormat = "lz4" + CompressionZSTD CompressionFormat = "zstd" + CompressionNone CompressionFormat = "none" +) + +type BufCloser struct { + b *bytes.Buffer +} + +func (b BufCloser) Close() error { + return nil +} + +func (b BufCloser) Write(p []byte) (int, error) { + return b.b.Write(p) +} + +func (b BufCloser) Reset(w io.Writer) { + b.b.Reset() +} + +type ResettableReader interface { + io.ReadCloser + Reset(io.Reader) +} + +type ResettableWriteCloser interface { + io.WriteCloser + Reset(io.Writer) +} + +type CRCWriter struct { + w io.Writer + crc hash.Hash32 +} + +func (w *CRCWriter) Write(p []byte) (int, error) { + _, _ = w.crc.Write(p) + return w.w.Write(p) +} + +func (w *CRCWriter) Checksum() uint32 { + return w.crc.Sum32() +} + +func (w *CRCWriter) Reset() { + w.crc = crc32.NewIEEE() +} + +func NewCRCWriter(w io.Writer) *CRCWriter { + return &CRCWriter{ + w: w, + crc: crc32.NewIEEE(), + } +} + +type WriteSizer struct { + w *CRCWriter + size uint64 +} + +func (w *WriteSizer) Write(p []byte) (int, error) { + w.size += uint64(len(p)) + return w.w.Write(p) +} + +func NewWriteSizer(w io.Writer) *WriteSizer { + return &WriteSizer{ + w: NewCRCWriter(w), + } +} + +func (w *WriteSizer) Size() uint64 { + return w.size +} + +func (w *WriteSizer) Checksum() uint32 { + return w.w.Checksum() +} + +func (w *WriteSizer) Reset() { + w.w.crc = crc32.NewIEEE() +} + +func putByte(buf []byte, x byte) int { + buf[0] = x + return 1 +} + +func getUint16(buf []byte, offset int) (uint16, int) { + return binary.LittleEndian.Uint16(buf[offset:]), offset + 2 +} + +func getUint32(buf []byte, offset int) (uint32, int) { + return binary.LittleEndian.Uint32(buf[offset:]), offset + 4 +} + +func getUint64(buf []byte, offset int) (uint64, int) { + return binary.LittleEndian.Uint64(buf[offset:]), offset + 8 +} + +func putUint16(buf []byte, i uint16) int { + binary.LittleEndian.PutUint16(buf, i) + return 2 +} + +func putUint32(buf []byte, i uint32) int { + binary.LittleEndian.PutUint32(buf, i) + return 4 +} + +func putUint64(buf []byte, i uint64) int { + binary.LittleEndian.PutUint64(buf, i) + return 8 +} + +func putPrefixedString(buf []byte, s string) int { + offset := putUint32(buf, uint32(len(s))) + offset += copy(buf[offset:], s) + return offset +} + +func putPrefixedBytes(buf []byte, s 
[]byte) int { + offset := putUint32(buf, uint32(len(s))) + offset += copy(buf[offset:], s) + return offset +} + +type CompressionFormat string + +const ( + OpHeader OpCode = 0x01 + OpFooter OpCode = 0x02 + OpChannelInfo OpCode = 0x03 + OpMessage OpCode = 0x04 + OpChunk OpCode = 0x05 + OpMessageIndex OpCode = 0x06 + OpChunkIndex OpCode = 0x07 + OpAttachment OpCode = 0x08 + OpAttachmentIndex OpCode = 0x09 + OpStatistics OpCode = 0x0a +) + +type OpCode byte + +type Message struct { + ChannelID uint16 + Sequence uint32 + RecordTime uint64 + PublishTime uint64 + Data []byte + channelInfo *ChannelInfo +} + +type ChannelInfo struct { + ChannelID uint16 + TopicName string + Encoding string + SchemaName string + Schema []byte + UserData map[string]string +} + +type Attachment struct { + Name string + RecordTime uint64 + ContentType string + Data []byte +} + +type CompressionSummary struct { + Algorithm CompressionFormat + ChunkCount uint64 +} + +type TypeSummary struct { + SchemaName string +} + +type TopicSummary struct { + TopicName string + MessageCount uint64 + SchemaName string +} + +// TODO md5sum in rosbags does not have a place in mcap + +type Summary struct { + Duration time.Duration + Start uint64 + End uint64 + Size uint64 + Messages uint64 + Compression []CompressionSummary + Types []TypeSummary + Topics []TopicSummary +} + +type AttachmentIndex struct { + RecordTime uint64 + AttachmentSize uint64 + Name string + ContentType string + Offset uint64 +} + +type Footer struct { + IndexOffset uint64 + IndexCRC uint32 +} + +type ChunkIndex struct { + StartTime uint64 + EndTime uint64 + ChunkOffset uint64 + MessageIndexOffsets map[uint16]uint64 + MessageIndexLength uint64 + Compression CompressionFormat + CompressedSize uint64 + UncompressedSize uint64 +} + +type Statistics struct { + MessageCount uint64 + ChannelCount uint32 + AttachmentCount uint32 + ChunkCount uint32 + ChannelStats map[uint16]uint64 + channels map[uint16]*ChannelInfo +} + +type Info struct { + Statistics *Statistics + Channels map[uint16]*ChannelInfo + ChunkIndexes []*ChunkIndex + Start time.Time + End time.Time +} + +func (i Info) ChannelCounts() map[string]uint64 { + counts := make(map[string]uint64) + for k, v := range i.Statistics.ChannelStats { + channel := i.Channels[k] + counts[channel.TopicName] = v + } + return counts +} + +func (i Info) String() string { + buf := &bytes.Buffer{} + start := uint64(math.MaxUint64) + end := uint64(0) + + compressionFormatStats := make(map[CompressionFormat]struct { + count int + compressedSize uint64 + uncompressedSize uint64 + }) + for _, ci := range i.ChunkIndexes { + if ci.StartTime < start { + start = ci.StartTime + } + if ci.EndTime > end { + end = ci.EndTime + } + stats := compressionFormatStats[ci.Compression] + stats.count++ + stats.compressedSize += ci.CompressedSize + stats.uncompressedSize += ci.UncompressedSize + compressionFormatStats[ci.Compression] = stats + } + + starttime := time.Unix(int64(start/1e9), int64(start%1e9)) + endtime := time.Unix(int64(end/1e9), int64(end%1e9)) + + fmt.Fprintf(buf, "duration: %s\n", endtime.Sub(starttime)) + fmt.Fprintf(buf, "start: %s\n", starttime.Format(time.RFC3339Nano)) + fmt.Fprintf(buf, "end: %s\n", endtime.Format(time.RFC3339Nano)) + fmt.Fprintf(buf, "messages: %d\n", i.Statistics.MessageCount) + fmt.Fprintf(buf, "chunks:\n") + chunkCount := len(i.ChunkIndexes) + for k, v := range compressionFormatStats { + compressionRatio := 100 * (1 - float64(v.compressedSize)/float64(v.uncompressedSize)) + fmt.Fprintf(buf, "\t%s: [%d/%d 
chunks] (%.2f%%) \n", k, v.count, chunkCount, compressionRatio) + } + fmt.Fprintf(buf, "channels\n") + + chanIDs := []uint16{} + for chanID := range i.Channels { + chanIDs = append(chanIDs, chanID) + } + sort.Slice(chanIDs, func(i, j int) bool { + return chanIDs[i] < chanIDs[j] + }) + for _, chanID := range chanIDs { + channel := i.Channels[chanID] + fmt.Fprintf(buf, "\t(%d) %s: %d msgs\n", channel.ChannelID, channel.TopicName, i.Statistics.ChannelStats[chanID]) + } + fmt.Fprintf(buf, "attachments: %d", i.Statistics.AttachmentCount) + return buf.String() +} + +type MessageIndexRecord struct { + Timestamp uint64 + Offset uint64 +} + +type MessageIndex struct { + ChannelID uint16 + Count uint32 + Records []MessageIndexRecord + CRC uint32 +} + +type Chunk struct { + UncompressedSize uint64 + UncompressedCRC uint32 + Compression string + Records []byte +} diff --git a/go/libmcap/reader.go b/go/libmcap/reader.go new file mode 100644 index 0000000000..47e5eae987 --- /dev/null +++ b/go/libmcap/reader.go @@ -0,0 +1,310 @@ +package libmcap + +import ( + "encoding/binary" + "fmt" + "io" + "math" +) + +func readPrefixedString(data []byte, offset int) (string, int, error) { + if len(data[offset:]) < 4 { + return "", 0, io.ErrShortBuffer + } + length := int(binary.LittleEndian.Uint32(data[offset : offset+4])) + if len(data[offset+4:]) < length { + return "", 0, io.ErrShortBuffer + } + return string(data[offset+4 : offset+length+4]), offset + 4 + length, nil +} + +func readPrefixedBytes(data []byte, offset int) ([]byte, int, error) { + if len(data[offset:]) < 4 { + return nil, 0, io.ErrShortBuffer + } + length := int(binary.LittleEndian.Uint32(data[offset : offset+4])) + if len(data[offset+4:]) < length { + return nil, 0, io.ErrShortBuffer + } + return data[offset+4 : offset+length+4], offset + 4 + length, nil +} + +func parseChunk(buf []byte) (*Chunk, error) { + uncompressedSize, offset := getUint64(buf, 0) + uncompressedCRC, offset := getUint32(buf, offset) + compression, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + records := buf[offset:] + return &Chunk{ + UncompressedSize: uncompressedSize, + UncompressedCRC: uncompressedCRC, + Compression: compression, + Records: records, + }, nil +} + +func parseMessageIndex(buf []byte) *MessageIndex { + channelID, offset := getUint16(buf, 0) + count, offset := getUint32(buf, offset) + _, offset = getUint32(buf, offset) + var recordTime uint64 + var recordOffset uint64 + records := make([]MessageIndexRecord, count) + for i := range records { + recordTime, offset = getUint64(buf, offset) + recordOffset, offset = getUint64(buf, offset) + records[i] = MessageIndexRecord{ + Timestamp: recordTime, + Offset: recordOffset, + } + } + crc, offset := getUint32(buf, offset) + return &MessageIndex{ + ChannelID: channelID, + Count: count, + Records: records, + CRC: crc, + } +} + +func parseAttachmentIndex(buf []byte) (*AttachmentIndex, error) { + recordTime, offset := getUint64(buf, 0) + dataSize, offset := getUint64(buf, offset) + name, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + contentType, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + attachmentOffset, offset := getUint64(buf, offset) + return &AttachmentIndex{ + RecordTime: recordTime, + AttachmentSize: dataSize, + Name: name, + ContentType: contentType, + Offset: attachmentOffset, + }, nil +} + +func parseMessage(buf []byte) *Message { + channelID, offset := getUint16(buf, 0) + sequence, 
offset := getUint32(buf, offset) + publishTime, offset := getUint64(buf, offset) + recordTime, offset := getUint64(buf, offset) + data := buf[offset:] + return &Message{ + ChannelID: channelID, + Sequence: sequence, + RecordTime: recordTime, + PublishTime: publishTime, + Data: data, + } +} + +func parseChunkIndex(buf []byte) (*ChunkIndex, error) { + startTime, offset := getUint64(buf, 0) + endTime, offset := getUint64(buf, offset) + chunkOffset, offset := getUint64(buf, offset) + msgIndexLen, offset := getUint32(buf, offset) + messageIndexOffsets := make(map[uint16]uint64) + var chanID uint16 + var indexOffset uint64 + inset := 0 + for inset < int(msgIndexLen) { + chanID, inset = getUint16(buf[offset:], inset) + indexOffset, inset = getUint64(buf[offset:], inset) + messageIndexOffsets[chanID] = indexOffset + } + offset += inset + msgIndexLength, offset := getUint64(buf, offset) + compression, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + compressedSize, offset := getUint64(buf, offset) + uncompressedSize, offset := getUint64(buf, offset) + return &ChunkIndex{ + StartTime: startTime, + EndTime: endTime, + ChunkOffset: chunkOffset, + MessageIndexOffsets: messageIndexOffsets, + MessageIndexLength: msgIndexLength, + Compression: CompressionFormat(compression), + CompressedSize: compressedSize, + UncompressedSize: uncompressedSize, + }, nil +} + +func parseChannelInfo(buf []byte) (*ChannelInfo, error) { + channelID, offset := getUint16(buf, 0) + topicName, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + encoding, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + schemaName, offset, err := readPrefixedString(buf, offset) + if err != nil { + return nil, err + } + schema, offset, err := readPrefixedBytes(buf, offset) + if err != nil { + return nil, err + } + userdata, offset, err := readPrefixedMap(buf, offset) + if err != nil { + return nil, err + } + return &ChannelInfo{ + ChannelID: channelID, + TopicName: topicName, + Encoding: encoding, + SchemaName: schemaName, + Schema: schema, + UserData: userdata, + }, nil +} + +func parseStatisticsRecord(buf []byte) *Statistics { + messageCount, offset := getUint64(buf, 0) + channelCount, offset := getUint32(buf, offset) + attachmentCount, offset := getUint32(buf, offset) + chunkCount, offset := getUint32(buf, offset) + + // TODO this is not actually necessary, since the bytes are at the end of + // the record + _, offset = getUint32(buf, offset) + var chanID uint16 + var channelMessageCount uint64 + channelStats := make(map[uint16]uint64) + for offset < len(buf) { + chanID, offset = getUint16(buf, offset) + channelMessageCount, offset = getUint64(buf, offset) + channelStats[chanID] = channelMessageCount + } + return &Statistics{ + MessageCount: messageCount, + ChannelCount: channelCount, + AttachmentCount: attachmentCount, + ChunkCount: chunkCount, + ChannelStats: channelStats, + } +} + +func readPrefixedMap(data []byte, offset int) (map[string]string, int, error) { + var err error + var key, value string + var inset int + m := make(map[string]string) + maplen, offset := getUint32(data, offset) + for uint32(offset+inset) < uint32(offset)+maplen { + key, inset, err = readPrefixedString(data[offset:], inset) + if err != nil { + return nil, 0, err + } + value, inset, err = readPrefixedString(data[offset:], inset) + if err != nil { + return nil, 0, err + } + m[key] = value + } + return m, offset + inset, nil +} + +type Reader struct { + 
l *lexer + r io.Reader + rs io.ReadSeeker + channels map[uint16]*ChannelInfo + statistics *Statistics + chunkIndexes []*ChunkIndex + attachmentIndexes []*AttachmentIndex +} + +type MessageIterator interface { + Next() (*ChannelInfo, *Message, error) +} + +func (r *Reader) unindexedIterator(topics []string, start uint64, end uint64) *unindexedMessageIterator { + topicMap := make(map[string]bool) + for _, topic := range topics { + topicMap[topic] = true + } + r.l.emitChunks = false + return &unindexedMessageIterator{ + lexer: r.l, + channels: make(map[uint16]*ChannelInfo), + topics: topicMap, + start: start, + end: end, + } +} + +func (r *Reader) indexedMessageIterator(topics []string, start uint64, end uint64) *indexedMessageIterator { + topicMap := make(map[string]bool) + for _, topic := range topics { + topicMap[topic] = true + } + r.l.emitChunks = true + return &indexedMessageIterator{ + lexer: r.l, + rs: r.rs, + channels: make(map[uint16]*ChannelInfo), + topics: topicMap, + start: start, + end: end, + activeChunksetIndex: -1, + activeChunkIndex: -1, + } +} + +func (r *Reader) Messages( + start int64, + end int64, + topics []string, + useIndex bool, +) (MessageIterator, error) { + if useIndex { + if rs, ok := r.r.(io.ReadSeeker); ok { + r.rs = rs + } else { + return nil, fmt.Errorf("indexed reader requires a seekable reader") + } + return r.indexedMessageIterator(topics, uint64(start), uint64(end)), nil + } + return r.unindexedIterator(topics, uint64(start), uint64(end)), nil +} + +func (r *Reader) Info() (*Info, error) { + it := r.indexedMessageIterator(nil, 0, math.MaxUint64) + err := it.parseIndexSection() + if err != nil { + return nil, err + } + return &Info{ + Statistics: it.statistics, + Channels: it.channels, + ChunkIndexes: it.chunkIndexes, + }, nil +} + +func NewReader(r io.Reader) *Reader { + var rs io.ReadSeeker + if readseeker, ok := r.(io.ReadSeeker); ok { + rs = readseeker + } + return &Reader{ + l: NewLexer(r, &lexOpts{ + emitChunks: true, + }), + r: r, + rs: rs, + channels: make(map[uint16]*ChannelInfo), + } +} diff --git a/go/libmcap/reader_test.go b/go/libmcap/reader_test.go new file mode 100644 index 0000000000..f39f758fee --- /dev/null +++ b/go/libmcap/reader_test.go @@ -0,0 +1,272 @@ +package libmcap + +import ( + "bytes" + "errors" + "fmt" + "io" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestReadPrefixedBytes(t *testing.T) { + cases := []struct { + assertion string + data []byte + expectedBytes []byte + expectedOffset int + expectedError error + }{ + { + "short length", + make([]byte, 3), + nil, + 0, + io.ErrShortBuffer, + }, + { + "short content", + []byte{0x01, 0x00, 0x00, 0x00}, + nil, + 0, + io.ErrShortBuffer, + }, + { + "good bytes", + []byte{0x05, 0x00, 0x00, 0x00, 'H', 'e', 'l', 'l', 'o'}, + []byte{'H', 'e', 'l', 'l', 'o'}, + 9, + nil, + }, + } + for _, c := range cases { + t.Run(c.assertion, func(t *testing.T) { + s, off, err := readPrefixedBytes(c.data, 0) + assert.ErrorIs(t, c.expectedError, err) + assert.Equal(t, c.expectedBytes, s) + assert.Equal(t, c.expectedOffset, off) + }) + } +} + +func TestReadPrefixedString(t *testing.T) { + cases := []struct { + assertion string + data []byte + expectedString string + expectedOffset int + expectedError error + }{ + { + "short length", + make([]byte, 3), + "", + 0, + io.ErrShortBuffer, + }, + { + "short content", + []byte{0x01, 0x00, 0x00, 0x00}, + "", + 0, + io.ErrShortBuffer, + }, + { + "good string", + []byte{0x05, 0x00, 0x00, 0x00, 0x48, 0x65, 0x6c, 0x6c, 0x6f}, + 
"Hello", + 9, + nil, + }, + } + for _, c := range cases { + t.Run(c.assertion, func(t *testing.T) { + s, off, err := readPrefixedString(c.data, 0) + assert.ErrorIs(t, c.expectedError, err) + assert.Equal(t, c.expectedString, s) + assert.Equal(t, c.expectedOffset, off) + }) + } +} + +func TestMessageReading(t *testing.T) { + for _, compression := range []CompressionFormat{ + CompressionNone, + CompressionZSTD, + CompressionLZ4, + } { + t.Run(fmt.Sprintf("writer compression %s", compression), func(t *testing.T) { + for _, useIndex := range []bool{ + true, + false, + } { + t.Run(fmt.Sprintf("indexed reading %v", useIndex), func(t *testing.T) { + buf := &bytes.Buffer{} + w, err := NewWriter(buf, &WriterOptions{ + Chunked: true, + Compression: compression, + IncludeCRC: true, + }) + assert.Nil(t, err) + err = w.WriteHeader("ros1", "", map[string]string{"foo": "bar"}) + assert.Nil(t, err) + err = w.WriteChannelInfo(&ChannelInfo{ + ChannelID: 0, + TopicName: "/test1", + Encoding: "ros1", + SchemaName: "foo", + Schema: []byte{}, + }) + err = w.WriteChannelInfo(&ChannelInfo{ + ChannelID: 1, + TopicName: "/test2", + Encoding: "ros1", + SchemaName: "foo", + Schema: []byte{}, + }) + assert.Nil(t, err) + for i := 0; i < 1000; i++ { + err := w.WriteMessage(&Message{ + ChannelID: uint16(i % 2), + Sequence: 0, + RecordTime: uint64(i), + PublishTime: uint64(i), + Data: []byte{1, 2, 3, 4}, + }) + assert.Nil(t, err) + } + w.Close() + t.Run("read all messages", func(t *testing.T) { + reader := bytes.NewReader(buf.Bytes()) + r := NewReader(reader) + it, err := r.Messages(0, 10000, []string{}, useIndex) + assert.Nil(t, err) + c := 0 + for { + ci, msg, err := it.Next() + if errors.Is(err, io.EOF) { + break + } + assert.Nil(t, err) + assert.NotNil(t, ci) + assert.NotNil(t, msg) + assert.Equal(t, msg.ChannelID, ci.ChannelID) + c++ + } + assert.Equal(t, 1000, c) + }) + t.Run("read messages on one topic", func(t *testing.T) { + reader := bytes.NewReader(buf.Bytes()) + r := NewReader(reader) + it, err := r.Messages(0, 10000, []string{"/test1"}, useIndex) + assert.Nil(t, err) + c := 0 + for { + ci, msg, err := it.Next() + if errors.Is(err, io.EOF) { + break + } + assert.Nil(t, err) + assert.NotNil(t, ci) + assert.NotNil(t, msg) + assert.Equal(t, msg.ChannelID, ci.ChannelID) + c++ + } + assert.Equal(t, 500, c) + }) + t.Run("read messages on multiple topics", func(t *testing.T) { + reader := bytes.NewReader(buf.Bytes()) + r := NewReader(reader) + it, err := r.Messages(0, 10000, []string{"/test1", "/test2"}, useIndex) + assert.Nil(t, err) + c := 0 + for { + ci, msg, err := it.Next() + if errors.Is(err, io.EOF) { + break + } + assert.Nil(t, err) + assert.NotNil(t, ci) + assert.NotNil(t, msg) + assert.Equal(t, msg.ChannelID, ci.ChannelID) + c++ + } + assert.Equal(t, 1000, c) + }) + t.Run("read messages in time range", func(t *testing.T) { + reader := bytes.NewReader(buf.Bytes()) + r := NewReader(reader) + it, err := r.Messages(100, 200, []string{}, useIndex) + assert.Nil(t, err) + c := 0 + for { + _, _, err := it.Next() + if errors.Is(err, io.EOF) { + break + } + assert.Nil(t, err) + c++ + } + assert.Equal(t, 100, c) + }) + }) + } + }) + } +} + +func TestReaderCounting(t *testing.T) { + for _, indexed := range []bool{true, false} { + t.Run(fmt.Sprintf("indexed %v", indexed), func(t *testing.T) { + bagfile, err := os.Open("../../testdata/bags/demo.bag") + assert.Nil(t, err) + defer bagfile.Close() + mcapfile := &bytes.Buffer{} + err = Bag2MCAP(bagfile, mcapfile) + assert.Nil(t, err) + r := 
NewReader(bytes.NewReader(mcapfile.Bytes())) + it, err := r.Messages(0, time.Now().UnixNano(), []string{}, indexed) + assert.Nil(t, err) + c := 0 + for { + _, _, err := it.Next() + if errors.Is(err, io.EOF) { + break + } + assert.Nil(t, err) + c++ + } + assert.Equal(t, 1606, c) + }) + } +} + +func TestMCAPInfo(t *testing.T) { + bagfile, err := os.Open("../../testdata/bags/demo.bag") + assert.Nil(t, err) + defer bagfile.Close() + mcapfile := &bytes.Buffer{} + err = Bag2MCAP(bagfile, mcapfile) + assert.Nil(t, err) + r := NewReader(bytes.NewReader(mcapfile.Bytes())) + info, err := r.Info() + assert.Nil(t, err) + assert.Equal(t, uint64(1606), info.Statistics.MessageCount) + assert.Equal(t, uint32(7), info.Statistics.ChannelCount) + assert.Equal(t, uint32(27), info.Statistics.ChunkCount) + expectedCounts := map[string]uint64{ + "/radar/points": 156, + "/radar/tracks": 156, + "/radar/range": 156, + "/velodyne_points": 78, + "/diagnostics": 52, + "/tf": 774, + "/image_color/compressed": 234, + } + for k, v := range info.ChannelCounts() { + assert.Equal(t, expectedCounts[k], v, "mismatch on %s - got %d", k, uint64(v)) + } +} diff --git a/go/libmcap/testutils.go b/go/libmcap/testutils.go new file mode 100644 index 0000000000..942c079942 --- /dev/null +++ b/go/libmcap/testutils.go @@ -0,0 +1,117 @@ +package libmcap + +import ( + "bytes" + "hash/crc32" + "io" + "testing" + + "github.com/klauspost/compress/zstd" + "github.com/pierrec/lz4/v4" + "github.com/stretchr/testify/assert" +) + +func flatten(slices ...[]byte) []byte { + var flattened []byte + for _, s := range slices { + flattened = append(flattened, s...) + } + return flattened +} + +func file(records ...[]byte) []byte { + var file [][]byte + file = append(file, Magic) + file = append(file, records...) + file = append(file, Magic) + return flatten(file...) +} + +func footer() []byte { + buf := make([]byte, 9) + buf[0] = byte(OpFooter) + offset := 1 + offset += putUint64(buf[offset:], 0) + return buf +} + +func header() []byte { + buf := make([]byte, 9) + buf[0] = byte(OpHeader) + offset := 1 + offset += putUint64(buf[offset:], 0) + return buf +} + +func channelInfo() []byte { + buf := make([]byte, 9) + buf[0] = byte(OpChannelInfo) + offset := 1 + offset += putUint64(buf[offset:], 0) + return buf +} + +func message() []byte { + buf := make([]byte, 9) + buf[0] = byte(OpMessage) + offset := 1 + offset += putUint64(buf[offset:], 0) + return buf +} + +func chunk(t *testing.T, compression CompressionFormat, records ...[]byte) []byte { + data := flatten(records...) 
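+	// The bytes assembled below mirror the on-disk chunk framing that
+	// lexChunk parses back out:
+	//   opcode (1) | record length (8) | uncompressed size (8) |
+	//   uncompressed CRC (4) | compression string (4 + len) | compressed records
+	// With this layout, byte 20 of the record is the most significant
+	// (little-endian) byte of the CRC field, which is what
+	// TestChunkCRCValidation corrupts to force a checksum mismatch.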
+ buf := &bytes.Buffer{} + switch compression { + case CompressionLZ4: + w := lz4.NewWriter(buf) + _, err := io.Copy(w, bytes.NewReader(data)) + assert.Nil(t, err) + w.Close() + case CompressionZSTD: + w, err := zstd.NewWriter(buf) + if err != nil { + t.Errorf("failed to create zstd writer: %s", err) + } + _, err = io.Copy(w, bytes.NewReader(data)) + assert.Nil(t, err) + w.Close() + case CompressionNone: + _, err := buf.Write(data) + assert.Nil(t, err) + default: + _, err := buf.Write(data) // unrecognized compression + assert.Nil(t, err) + } + compressionLen := len(compression) + compressedLen := buf.Len() + uncompressedLen := len(data) + msglen := uint64(8 + 4 + 4 + compressionLen + compressedLen) + record := make([]byte, msglen+9) + record[0] = byte(OpChunk) + offset := 1 + offset += putUint64(record[offset:], msglen) + offset += putUint64(record[offset:], uint64(uncompressedLen)) + crc := crc32.NewIEEE() + _, _ = crc.Write(data) + offset += putUint32(record[offset:], crc.Sum32()) + offset += putPrefixedString(record[offset:], string(compression)) + offset += copy(record[offset:], buf.Bytes()) + return record +} + +func attachment() []byte { + buf := make([]byte, 9) + buf[0] = byte(OpAttachment) + offset := 1 + offset += putUint64(buf[offset:], 0) + return buf +} + +func statistics() []byte { + buf := make([]byte, 9) + buf[0] = byte(OpStatistics) + offset := 1 + offset += putUint64(buf[offset:], 0) + return buf +} diff --git a/go/libmcap/unindexed_message_iterator.go b/go/libmcap/unindexed_message_iterator.go new file mode 100644 index 0000000000..faef46bb46 --- /dev/null +++ b/go/libmcap/unindexed_message_iterator.go @@ -0,0 +1,53 @@ +package libmcap + +import ( + "fmt" + "io" +) + +type unindexedMessageIterator struct { + lexer *lexer + channels map[uint16]*ChannelInfo + topics map[string]bool + start uint64 + end uint64 +} + +func (it *unindexedMessageIterator) Next() (*ChannelInfo, *Message, error) { + for { + token := it.lexer.Next() + switch token.TokenType { + case TokenError: + return nil, nil, fmt.Errorf("%s", token.bytes()) + case TokenEOF: + return nil, nil, io.EOF + case TokenChannelInfo: + channelInfo, err := parseChannelInfo(token.bytes()) + if err != nil { + return nil, nil, fmt.Errorf("failed to parse channel info: %w", err) + } + if _, ok := it.channels[channelInfo.ChannelID]; !ok { + if len(it.topics) == 0 || it.topics[channelInfo.TopicName] { + it.channels[channelInfo.ChannelID] = channelInfo + } + } + case TokenMessage: + message := parseMessage(token.bytes()) + if _, ok := it.channels[message.ChannelID]; !ok { + // skip messages on channels we don't know about. Note that if + // an unindexed reader encounters a message it would be + // interested in, but has not yet encountered the corresponding + // channel ID, it has no option but to skip. 
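+				// A well-formed writer (including the Writer in this package)
+				// emits each ChannelInfo ahead of any message on that channel,
+				// so in practice this branch skips only messages on topics
+				// that were filtered out above.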
+ continue + } + if message.RecordTime >= uint64(it.start) && message.RecordTime < uint64(it.end) { + return it.channels[message.ChannelID], message, nil + } + default: + _, err := io.CopyN(io.Discard, token.Reader, token.ByteCount) + if err != nil { + return nil, nil, err + } + } + } +} diff --git a/go/libmcap/writer.go b/go/libmcap/writer.go new file mode 100644 index 0000000000..ad7d05944b --- /dev/null +++ b/go/libmcap/writer.go @@ -0,0 +1,447 @@ +package libmcap + +import ( + "bytes" + "fmt" + "hash/crc32" + "io" + "math" + "sort" + + "github.com/klauspost/compress/zstd" + "github.com/pierrec/lz4/v4" +) + +type Writer struct { + channels map[uint16]*ChannelInfo + w *WriteSizer + buf8 []byte + msg []byte + chunked bool + includeCRC bool + uncompressed *bytes.Buffer + chunksize int64 + compressed *bytes.Buffer + compression CompressionFormat + stats *Statistics + messageIndexes map[uint16]*MessageIndex + chunkIndexes []*ChunkIndex + attachmentIndexes []*AttachmentIndex + + compressedWriter *CountingCRCWriter +} + +func (w *Writer) writeRecord(writer io.Writer, op OpCode, data []byte) (int, error) { + c, err := writer.Write([]byte{byte(op)}) + if err != nil { + return c, err + } + putUint64(w.buf8, uint64(len(data))) + n, err := writer.Write(w.buf8) + c += n + if err != nil { + return c, err + } + n, err = writer.Write(data) + c += n + if err != nil { + return c, err + } + return c, nil +} + +func (w *Writer) writeChunk() error { + err := w.compressedWriter.Close() + if err != nil { + return err + } + crc := w.compressedWriter.CRC() + compressedlen := w.compressed.Len() + uncompressedlen := w.compressedWriter.Size() + msglen := 8 + 4 + 4 + len(w.compression) + compressedlen + chunkStartOffset := w.w.Size() + + // when writing a chunk, we don't go through writerecord to avoid needing to + // materialize the compressed data again. Instead, write the leading bytes + // then copy from the compressed data buffer. 
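+	// The header assembled below is: opcode (1) | record length (8) |
+	// uncompressed size (8) | uncompressed CRC (4) | compression string (4 + len).
+	// msglen covers everything after the first two fields, including the
+	// compressed payload that is copied in afterwards.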
+	buf := make([]byte, 1+8+8+4+4+len(w.compression))
+	offset := putByte(buf, byte(OpChunk))
+	offset += putUint64(buf[offset:], uint64(msglen))
+	offset += putUint64(buf[offset:], uint64(uncompressedlen))
+	offset += putUint32(buf[offset:], crc)
+	offset += putPrefixedString(buf[offset:], string(w.compression))
+	_, err = w.w.Write(buf[:offset])
+	if err != nil {
+		return err
+	}
+	// copy the compressed data buffer, then reset it
+	_, err = io.Copy(w.w, w.compressed)
+	if err != nil {
+		return err
+	}
+	w.compressed.Reset()
+	w.compressedWriter.Reset(w.compressed)
+	w.compressedWriter.ResetSize()
+	w.compressedWriter.ResetCRC()
+
+	// TODO: spec change under consideration - omit the chunk index record
+	// when the chunk contains no messages
+	msgidxOffsets := make(map[uint16]uint64)
+	var start, end uint64
+	start = math.MaxUint64
+	end = 0
+	messageIndexStart := w.w.Size()
+	for _, msgidx := range w.messageIndexes {
+		// TODO evaluate custom sort for mostly sorted input
+		sort.Slice(msgidx.Records, func(i, j int) bool {
+			return msgidx.Records[i].Timestamp < msgidx.Records[j].Timestamp
+		})
+
+		if first := msgidx.Records[0].Timestamp; first < start {
+			start = first
+		}
+		if last := msgidx.Records[len(msgidx.Records)-1].Timestamp; last > end {
+			end = last
+		}
+		msgidxOffsets[msgidx.ChannelID] = w.w.Size()
+		err = w.WriteMessageIndex(msgidx)
+		if err != nil {
+			return err
+		}
+	}
+	messageIndexEnd := w.w.Size()
+	messageIndexLength := messageIndexEnd - messageIndexStart
+	w.chunkIndexes = append(w.chunkIndexes, &ChunkIndex{
+		StartTime:           start,
+		EndTime:             end,
+		ChunkOffset:         chunkStartOffset,
+		MessageIndexOffsets: msgidxOffsets,
+		MessageIndexLength:  messageIndexLength,
+		Compression:         w.compression,
+		CompressedSize:      uint64(compressedlen),
+		UncompressedSize:    uint64(uncompressedlen),
+	})
+	for k := range w.messageIndexes {
+		delete(w.messageIndexes, k)
+	}
+	w.stats.ChunkCount++
+	return nil
+}
+
+func (w *Writer) WriteMessage(m *Message) error {
+	if w.channels[m.ChannelID] == nil {
+		return fmt.Errorf("unrecognized channel %d", m.ChannelID)
+	}
+	msglen := 2 + 4 + 8 + 8 + len(m.Data)
+	if len(w.msg) < msglen {
+		w.msg = make([]byte, 2*msglen)
+	}
+	offset := putUint16(w.msg, m.ChannelID)
+	offset += putUint32(w.msg[offset:], m.Sequence)
+	offset += putUint64(w.msg[offset:], uint64(m.PublishTime))
+	offset += putUint64(w.msg[offset:], uint64(m.RecordTime))
+	offset += copy(w.msg[offset:], m.Data)
+	w.stats.ChannelStats[m.ChannelID]++
+	w.stats.MessageCount++
+	if w.chunked {
+
+		// TODO: preallocate, or use a fancier data structure. These could
+		// also be reused across chunks, which might work OK assuming similar
+		// numbers of messages per channel per chunk.
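+		// Each chunk carries one message index per channel: every message
+		// appends a (record time, offset) pair, where the offset is the
+		// message's position within the uncompressed chunk data, so a reader
+		// can locate it after decompressing the chunk.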
+ + idx, ok := w.messageIndexes[m.ChannelID] + if !ok { + idx = &MessageIndex{ + ChannelID: m.ChannelID, + Count: 0, + Records: nil, + } + w.messageIndexes[m.ChannelID] = idx + } + idx.Records = append(idx.Records, MessageIndexRecord{m.RecordTime, uint64(w.compressedWriter.Size())}) + idx.Count++ + _, err := w.writeRecord(w.compressedWriter, OpMessage, w.msg[:offset]) + if err != nil { + return err + } + if w.compressedWriter.Size() > w.chunksize { + err := w.writeChunk() + if err != nil { + return err + } + } + return nil + } + _, err := w.writeRecord(w.w, OpMessage, w.msg[:offset]) + if err != nil { + return err + } + return nil +} + +func (w *Writer) WriteMessageIndex(idx *MessageIndex) error { + datalen := len(idx.Records) * (8 + 8) + msglen := 2 + 4 + 4 + datalen + 4 + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putUint16(w.msg, idx.ChannelID) + offset += putUint32(w.msg[offset:], idx.Count) + offset += putUint32(w.msg[offset:], uint32(datalen)) + for _, v := range idx.Records { + offset += putUint64(w.msg[offset:], uint64(v.Timestamp)) + offset += putUint64(w.msg[offset:], uint64(v.Offset)) + } + crc := crc32.ChecksumIEEE(w.msg[:offset]) + offset += putUint32(w.msg[offset:], crc) + _, err := w.writeRecord(w.w, OpMessageIndex, w.msg[:offset]) + return err +} + +func makePrefixedMap(m map[string]string) []byte { + maplen := 0 + for k, v := range m { + maplen += 4 + len(k) + 4 + len(v) + } + buf := make([]byte, maplen+4) + offset := putUint32(buf, uint32(maplen)) + for k, v := range m { + offset += putPrefixedString(buf[offset:], k) + offset += putPrefixedString(buf[offset:], v) + } + return buf +} + +func (w *Writer) WriteHeader(profile string, library string, metadata map[string]string) error { + data := makePrefixedMap(metadata) + buf := make([]byte, len(data)+len(profile)+4+len(library)+4) + offset := putPrefixedString(buf, profile) + offset += putPrefixedString(buf[offset:], library) + offset += copy(buf[offset:], data) + _, err := w.writeRecord(w.w, OpHeader, buf[:offset]) + return err +} + +func (w *Writer) WriteChannelInfo(c *ChannelInfo) error { + userdata := makePrefixedMap(c.UserData) + msglen := 2 + 4 + len(c.TopicName) + 4 + len(c.Encoding) + 4 + len(c.SchemaName) + 4 + len(c.Schema) + len(userdata) + 4 + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putUint16(w.msg, c.ChannelID) + offset += putPrefixedString(w.msg[offset:], c.TopicName) + offset += putPrefixedString(w.msg[offset:], c.Encoding) + offset += putPrefixedString(w.msg[offset:], c.SchemaName) + offset += putPrefixedBytes(w.msg[offset:], c.Schema) + offset += copy(w.msg[offset:], userdata) + crc := crc32.ChecksumIEEE(w.msg[:offset]) + offset += putUint32(w.msg[offset:], crc) + var err error + if w.chunked { + _, err = w.writeRecord(w.compressedWriter, OpChannelInfo, w.msg[:offset]) + if err != nil { + return err + } + } else { + _, err = w.writeRecord(w.w, OpChannelInfo, w.msg[:offset]) + if err != nil { + return err + } + } + if _, ok := w.channels[c.ChannelID]; !ok { + w.stats.ChannelCount++ + w.channels[c.ChannelID] = c + } + return nil +} + +func (w *Writer) WriteAttachment(a *Attachment) error { + msglen := 4 + len(a.Name) + 8 + 4 + len(a.ContentType) + 8 + len(a.Data) + 4 + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putPrefixedString(w.msg, a.Name) + offset += putUint64(w.msg[offset:], a.RecordTime) + offset += putPrefixedString(w.msg[offset:], a.ContentType) + offset += putUint64(w.msg[offset:], uint64(len(a.Data))) + 
offset += copy(w.msg[offset:], a.Data) + attachmentOffset := w.w.Size() + _, err := w.writeRecord(w.w, OpAttachment, w.msg[:offset]) + if err != nil { + return err + } + w.attachmentIndexes = append(w.attachmentIndexes, &AttachmentIndex{ + RecordTime: a.RecordTime, + AttachmentSize: uint64(len(a.Data)), + Name: a.Name, + ContentType: a.ContentType, + Offset: attachmentOffset, + }) + w.stats.AttachmentCount++ + return nil +} + +func (w *Writer) WriteAttachmentIndex(idx *AttachmentIndex) error { + msglen := 8 + 8 + 4 + len(idx.Name) + 4 + len(idx.ContentType) + 8 + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putUint64(w.msg, idx.RecordTime) + offset += putUint64(w.msg[offset:], idx.AttachmentSize) + offset += putPrefixedString(w.msg[offset:], idx.Name) + offset += putPrefixedString(w.msg[offset:], idx.ContentType) + offset += putUint64(w.msg[offset:], idx.Offset) + _, err := w.writeRecord(w.w, OpAttachmentIndex, w.msg[:offset]) + return err +} + +func (w *Writer) writeChunkIndex(idx *ChunkIndex) error { + msgidxlen := len(idx.MessageIndexOffsets) * (2 + 8) + msglen := 8 + 8 + 8 + 4 + msgidxlen + 8 + 4 + len(idx.Compression) + 8 + 8 + 4 + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putUint64(w.msg, idx.StartTime) + offset += putUint64(w.msg[offset:], idx.EndTime) + offset += putUint64(w.msg[offset:], idx.ChunkOffset) + + offset += putUint32(w.msg[offset:], uint32(msgidxlen)) + for k, v := range idx.MessageIndexOffsets { + offset += putUint16(w.msg[offset:], k) + offset += putUint64(w.msg[offset:], v) + } + offset += putUint64(w.msg[offset:], idx.MessageIndexLength) + offset += putPrefixedString(w.msg[offset:], string(idx.Compression)) + offset += putUint64(w.msg[offset:], idx.CompressedSize) + offset += putUint64(w.msg[offset:], idx.UncompressedSize) + + crc := crc32.ChecksumIEEE(w.msg[:offset]) + offset += putUint32(w.msg[offset:], crc) + _, err := w.writeRecord(w.w, OpChunkIndex, w.msg[:offset]) + return err +} + +func (w *Writer) WriteStatistics(s *Statistics) error { + msglen := 8 + 4 + 4 + 4 + len(s.ChannelStats)*(2+8) + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putUint64(w.msg, s.MessageCount) + offset += putUint32(w.msg[offset:], s.ChannelCount) + offset += putUint32(w.msg[offset:], s.AttachmentCount) + offset += putUint32(w.msg[offset:], s.ChunkCount) + offset += putUint32(w.msg[offset:], uint32(len(s.ChannelStats)*(2+8))) + for k, v := range s.ChannelStats { + offset += putUint16(w.msg[offset:], k) + offset += putUint64(w.msg[offset:], v) + } + _, err := w.writeRecord(w.w, OpStatistics, w.msg[:offset]) + return err +} + +func (w *Writer) WriteFooter(f *Footer) error { + msglen := 8 + 4 + if len(w.msg) < msglen { + w.msg = make([]byte, 2*msglen) + } + offset := putUint64(w.msg, f.IndexOffset) + offset += putUint32(w.msg[offset:], f.IndexCRC) + _, err := w.writeRecord(w.w, OpFooter, w.msg[:offset]) + return err +} + +func (w *Writer) Close() error { + if w.chunked { + err := w.writeChunk() + if err != nil { + return err + } + } + indexOffset := w.w.Size() + w.chunked = false + for _, channelInfo := range w.channels { + err := w.WriteChannelInfo(channelInfo) + if err != nil { + return err + } + } + for _, chunkidx := range w.chunkIndexes { + err := w.writeChunkIndex(chunkidx) + if err != nil { + return err + } + } + for _, attachmentidx := range w.attachmentIndexes { + err := w.WriteAttachmentIndex(attachmentidx) + if err != nil { + return err + } + } + err := w.WriteStatistics(w.stats) + if err 
!= nil { + return err + } + err = w.WriteFooter(&Footer{ + IndexOffset: indexOffset, + IndexCRC: 0, + }) + if err != nil { + return err + } + _, err = w.w.Write(Magic) + if err != nil { + return err + } + return nil +} + +type WriterOptions struct { + IncludeCRC bool + Chunked bool + ChunkSize int64 + Compression CompressionFormat +} + +func NewWriter(w io.Writer, opts *WriterOptions) (*Writer, error) { + writer := NewWriteSizer(w) + _, err := writer.Write(Magic) + if err != nil { + return nil, err + } + compressed := bytes.Buffer{} + var compressedWriter *CountingCRCWriter + if opts.Compression != "" { + switch opts.Compression { + case CompressionLZ4: + compressedWriter = NewCountingCRCWriter(lz4.NewWriter(&compressed), opts.IncludeCRC) + case CompressionZSTD: + zw, err := zstd.NewWriter(&compressed) + if err != nil { + return nil, err + } + compressedWriter = NewCountingCRCWriter(zw, opts.IncludeCRC) + case CompressionNone: + compressedWriter = NewCountingCRCWriter(BufCloser{&compressed}, opts.IncludeCRC) + default: + return nil, fmt.Errorf("unsupported compression") + } + if opts.ChunkSize == 0 { + opts.ChunkSize = 1024 * 1024 + } + } + return &Writer{ + w: writer, + buf8: make([]byte, 8), + channels: make(map[uint16]*ChannelInfo), + messageIndexes: make(map[uint16]*MessageIndex), + uncompressed: &bytes.Buffer{}, + compressed: &compressed, + chunksize: opts.ChunkSize, + chunked: opts.Chunked, + compression: opts.Compression, + compressedWriter: compressedWriter, + includeCRC: opts.IncludeCRC, + stats: &Statistics{ + ChannelStats: make(map[uint16]uint64), + }, + }, nil +} diff --git a/go/libmcap/writer_test.go b/go/libmcap/writer_test.go new file mode 100644 index 0000000000..325abd1000 --- /dev/null +++ b/go/libmcap/writer_test.go @@ -0,0 +1,152 @@ +package libmcap + +import ( + "bytes" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMCAPReadWrite(t *testing.T) { + t.Run("test header", func(t *testing.T) { + buf := &bytes.Buffer{} + w, err := NewWriter(buf, &WriterOptions{Compression: CompressionLZ4}) + assert.Nil(t, err) + err = w.WriteHeader("ros1", "", map[string]string{"foo": "bar"}) + assert.Nil(t, err) + lexer := NewLexer(buf) + token := lexer.Next() + // body of the header is the profile, followed by the metadata map + offset := 0 + data := token.bytes() + profile, offset, err := readPrefixedString(data, offset) + assert.Nil(t, err) + assert.Equal(t, "ros1", profile) + library, offset, err := readPrefixedString(data, offset) + assert.Nil(t, err) + assert.Equal(t, "", library) + metadata, offset, err := readPrefixedMap(data, offset) + assert.Nil(t, err) + assert.Equal(t, 1, len(metadata)) + assert.Equal(t, "bar", metadata["foo"]) + assert.Equal(t, TokenHeader, token.TokenType) + }) +} + +func TestChunkedReadWrite(t *testing.T) { + for _, compression := range []CompressionFormat{ + CompressionLZ4, + CompressionZSTD, + CompressionNone, + } { + t.Run(fmt.Sprintf("chunked file with %s", compression), func(t *testing.T) { + buf := &bytes.Buffer{} + w, err := NewWriter(buf, &WriterOptions{ + Chunked: true, + Compression: compression, + IncludeCRC: true, + }) + assert.Nil(t, err) + err = w.WriteHeader("ros1", "", map[string]string{"foo": "bar"}) + assert.Nil(t, err) + err = w.WriteChannelInfo(&ChannelInfo{ + ChannelID: 1, + TopicName: "/test", + Encoding: "ros1", + SchemaName: "foo", + Schema: []byte{}, + UserData: map[string]string{ + "callerid": "100", + }, + }) + assert.Nil(t, err) + err = w.WriteMessage(&Message{ + ChannelID: 1, + Sequence: 0, + 
RecordTime: 100, + PublishTime: 100, + Data: []byte{ + 1, + 2, + 3, + 4, + }, + }) + assert.Nil(t, w.Close()) + assert.Nil(t, err) + lexer := NewLexer(buf) + for i, expected := range []TokenType{ + TokenHeader, + TokenChannelInfo, + TokenMessage, + TokenChannelInfo, + TokenStatistics, + TokenFooter, + TokenEOF, + } { + tok := lexer.Next() + _ = tok.bytes() // need to read the data + assert.Equal(t, expected, tok.TokenType, fmt.Sprintf("want %s got %s at %d", Token{expected, 0, nil}, tok.TokenType, i)) + } + }) + } +} + +func TestUnchunkedReadWrite(t *testing.T) { + buf := &bytes.Buffer{} + w, err := NewWriter(buf, &WriterOptions{}) + assert.Nil(t, err) + err = w.WriteHeader("ros1", "", map[string]string{"foo": "bar"}) + assert.Nil(t, err) + err = w.WriteChannelInfo(&ChannelInfo{ + ChannelID: 1, + TopicName: "/test", + Encoding: "ros1", + SchemaName: "foo", + Schema: []byte{}, + UserData: map[string]string{ + "callerid": "100", + }, + }) + assert.Nil(t, err) + err = w.WriteMessage(&Message{ + ChannelID: 1, + Sequence: 0, + RecordTime: 100, + PublishTime: 100, + Data: []byte{ + 1, + 2, + 3, + 4, + }, + }) + assert.Nil(t, err) + + err = w.WriteAttachment(&Attachment{ + Name: "file.jpg", + RecordTime: 0, + ContentType: "image/jpeg", + Data: []byte{0x01, 0x02, 0x03, 0x04}, + }) + assert.Nil(t, err) + w.Close() + + lexer := NewLexer(buf) + for _, expected := range []TokenType{ + TokenHeader, + TokenChannelInfo, + TokenMessage, + TokenAttachment, + TokenChannelInfo, + TokenAttachmentIndex, + TokenStatistics, + TokenFooter, + TokenEOF, + } { + tok := lexer.Next() + _ = tok.bytes() + assert.Equal(t, expected, tok.TokenType, fmt.Sprintf("want %s got %s", Token{expected, 0, nil}, tok)) + } +} diff --git a/go/mcap/README.md b/go/mcap/README.md new file mode 100644 index 0000000000..18ea4fe6e8 --- /dev/null +++ b/go/mcap/README.md @@ -0,0 +1,64 @@ +## mcap tool + +> Note: this tool is experimental and will change without warning until finalization of the MCAP spec. + +A basic command line tool to demonstrate the mcap file format. See `mcap -h` +for details: + + A small utility for demonstration of the mcap file format + + Usage: + mcap [command] + + Available Commands: + cat Cat the messages in an mcap file to stdout + completion Generate the autocompletion script for the specified shell + convert Convert a bag file to an mcap file + help Help about any command + info Report statistics about an mcap file + + Flags: + --config string config file (default is $HOME/.mcap.yaml) + -h, --help help for mcap + -t, --toggle Help message for toggle + + Use "mcap [command] --help" for more information about a command. + + + +Examples: + + +Convert a bag file to mcap: + + [~/work/mcap/go/mcap] (task/mcap-client) $ mcap convert ../../testdata/bags/demo.bag demo.mcap + +Report summary statistics on an mcap file: + + [~/work/mcap/go/mcap] (task/mcap-client) $ mcap info demo.mcap + duration: 7.780758504s + start: 2017-03-21T19:26:20.103843113-07:00 + end: 2017-03-21T19:26:27.884601617-07:00 + messages: 1606 + chunks: + lz4: [27/27 chunks] (44.32%) + channels + (0) /diagnostics: 52 msgs + (1) /image_color/compressed: 234 msgs + (2) /tf: 774 msgs + (3) /radar/points: 156 msgs + (4) /radar/range: 156 msgs + (5) /radar/tracks: 156 msgs + (6) /velodyne_points: 78 msgs + attachments: 0 + [~/work/mcap/go/mcap] (task/mcap-client) $ mcap cat demo.mcap --topics /tf,/diagnostics | head -n 10 + 1490149580103843113 /diagnostics [42 10 0 0 204 224 209 88 99 250]... + 1490149580103843113 /tf [1 0 0 0 0 0 0 0 204 224]... 
+ 1490149580113944947 /tf [1 0 0 0 0 0 0 0 204 224]...
+ 1490149580124028613 /tf [1 0 0 0 0 0 0 0 204 224]...
+ 1490149580134219155 /tf [1 0 0 0 0 0 0 0 204 224]...
+ 1490149580144292780 /tf [1 0 0 0 0 0 0 0 204 224]...
+ 1490149580154895238 /tf [1 0 0 0 0 0 0 0 204 224]...
+ 1490149580165152280 /diagnostics [94 13 0 0 204 224 209 88 174 52]...
+ 1490149580165152280 /diagnostics [95 13 0 0 204 224 209 88 215 86]...
+ 1490149580165152280 /tf [1 0 0 0 0 0 0 0 204 224]... 
diff --git a/go/mcap/cmd/cat.go b/go/mcap/cmd/cat.go new file mode 100644 index 0000000000..547fb85594 --- /dev/null +++ b/go/mcap/cmd/cat.go @@ -0,0 +1,50 @@ +package cmd
+
+import (
+	"errors"
+	"fmt"
+	"io"
+	"log"
+	"math"
+	"os"
+	"strings"
+
+	"github.com/foxglove/mcap/go/libmcap"
+	"github.com/spf13/cobra"
+)
+
+var (
+	topics string
+	start  int64
+	end    int64
+)
+
+var catCmd = &cobra.Command{
+	Use:   "cat",
+	Short: "Cat the messages in an mcap file to stdout",
+	Run: func(cmd *cobra.Command, args []string) {
+		topics := strings.FieldsFunc(topics, func(c rune) bool { return c == ',' })
+		f, err := os.Open(args[0])
+		if err != nil {
+			log.Fatal(err)
+		}
+		reader := libmcap.NewReader(f)
+		it, err := reader.Messages(start, end, topics, true)
+		if err != nil {
+			log.Fatal(err)
+		}
+		for {
+			ci, msg, err := it.Next()
+			// EOF signals the end of the requested messages, not a failure
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			if err != nil {
+				log.Fatal(err)
+			}
+			fmt.Printf("%d %s %v...\n", msg.RecordTime, ci.TopicName, msg.Data[:10])
+		}
+	},
+}
+
+func init() {
+	rootCmd.AddCommand(catCmd)
+
+	catCmd.PersistentFlags().Int64VarP(&start, "start", "", 0, "start time (epoch seconds)")
+	catCmd.PersistentFlags().Int64VarP(&end, "end", "", math.MaxInt64, "end time (epoch seconds)")
+	catCmd.PersistentFlags().StringVarP(&topics, "topics", "", "", "comma-separated list of topics")
+}
diff --git a/go/mcap/cmd/convert.go b/go/mcap/cmd/convert.go new file mode 100644 index 0000000000..b1a6cb3980 --- /dev/null +++ b/go/mcap/cmd/convert.go @@ -0,0 +1,39 @@ +package cmd
+
+import (
+	"errors"
+	"io"
+	"log"
+	"os"
+
+	"github.com/foxglove/mcap/go/libmcap"
+	"github.com/spf13/cobra"
+)
+
+var convertCmd = &cobra.Command{
+	Use:   "convert [input] [output]",
+	Short: "Convert a bag file to an mcap file",
+	Run: func(cmd *cobra.Command, args []string) {
+		if len(args) != 2 {
+			log.Fatal("supply an input and output file (see mcap convert -h)")
+		}
+		f, err := os.Open(args[0])
+		if err != nil {
+			log.Fatalf("failed to open input: %v", err)
+		}
+		defer f.Close()
+		w, err := os.Create(args[1])
+		if err != nil {
+			log.Fatalf("failed to open output: %v", err)
+		}
+		defer w.Close()
+		err = libmcap.Bag2MCAP(f, w)
+		if err != nil && !errors.Is(err, io.EOF) {
+			log.Fatal("failed to convert file: ", err)
+		}
+	},
+}
+
+func init() {
+	rootCmd.AddCommand(convertCmd)
+}
diff --git a/go/mcap/cmd/info.go b/go/mcap/cmd/info.go new file mode 100644 index 0000000000..cda29a6b95 --- /dev/null +++ b/go/mcap/cmd/info.go @@ -0,0 +1,34 @@ +package cmd
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/foxglove/mcap/go/libmcap"
+	"github.com/spf13/cobra"
+)
+
+var infoCmd = &cobra.Command{
+	Use:   "info",
+	Short: "Report statistics about an mcap file",
+	Run: func(cmd *cobra.Command, args []string) {
+		if len(args) != 1 {
+			log.Fatal("Unexpected number of args")
+		}
+		r, err := os.Open(args[0])
+		if err != nil {
+			log.Fatal(err)
+		}
+		reader := libmcap.NewReader(r)
+		info, err := reader.Info()
+		if err != nil {
+			log.Fatal(err)
+		}
+		fmt.Printf("%+v\n", info)
+	},
+}
+
+func init() {
+	rootCmd.AddCommand(infoCmd)
+}
diff --git a/go/mcap/cmd/root.go 
new file mode 100644 index 0000000000..c128ee1522 --- /dev/null +++ b/go/mcap/cmd/root.go @@ -0,0 +1,43 @@ +package cmd + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" + + "github.com/spf13/viper" +) + +var cfgFile string + +var rootCmd = &cobra.Command{ + Use: "mcap", + Short: "A small utility for demonstration of the mcap file format", +} + +func Execute() { + cobra.CheckErr(rootCmd.Execute()) +} + +func init() { + cobra.OnInitialize(initConfig) + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.mcap.yaml)") + rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") +} + +func initConfig() { + if cfgFile != "" { + viper.SetConfigFile(cfgFile) + } else { + home, err := os.UserHomeDir() + cobra.CheckErr(err) + viper.AddConfigPath(home) + viper.SetConfigType("yaml") + viper.SetConfigName(".mcap") + } + viper.AutomaticEnv() + if err := viper.ReadInConfig(); err == nil { + fmt.Fprintln(os.Stderr, "Using config file:", viper.ConfigFileUsed()) + } +} diff --git a/go/mcap/go.mod b/go/mcap/go.mod new file mode 100644 index 0000000000..70f2fd4fd7 --- /dev/null +++ b/go/mcap/go.mod @@ -0,0 +1,35 @@ +module github.com/foxglove/mcap/go/mcap + +go 1.17 + +require ( + github.com/foxglove/mcap/go/libmcap v0.0.0-00010101000000-000000000000 + github.com/spf13/cobra v1.3.0 + github.com/spf13/viper v1.10.1 +) + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/fsnotify/fsnotify v1.5.1 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/klauspost/compress v1.14.1 // indirect + github.com/magiconair/properties v1.8.5 // indirect + github.com/mitchellh/mapstructure v1.4.3 // indirect + github.com/pelletier/go-toml v1.9.4 // indirect + github.com/pierrec/lz4/v4 v4.1.12 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/spf13/afero v1.6.0 // indirect + github.com/spf13/cast v1.4.1 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/stretchr/testify v1.7.0 // indirect + github.com/subosito/gotenv v1.2.0 // indirect + golang.org/x/sys v0.0.0-20211210111614-af8b64212486 // indirect + golang.org/x/text v0.3.7 // indirect + gopkg.in/ini.v1 v1.66.2 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect +) + +replace github.com/foxglove/mcap/go/libmcap => ../libmcap diff --git a/go/mcap/go.sum b/go/mcap/go.sum new file mode 100644 index 0000000000..3a30fb8e22 --- /dev/null +++ b/go/mcap/go.sum @@ -0,0 +1,789 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod 
h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= 
+github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go 
v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-stack/stack v1.8.0/go.mod 
h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod 
h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= 
+github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= 
+github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.14.1 h1:hLQYb23E8/fO+1u53d02A97a8UnsddcvYzq4ERRU4ds= +github.com/klauspost/compress v1.14.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= +github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod 
h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pierrec/lz4/v4 v4.1.12 h1:44l88ehTZAUGW4VlO1QC4zkilL99M6Y9MXNwEs0uzP8= +github.com/pierrec/lz4/v4 v4.1.12/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= 
+github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= +github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0= +github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= +github.com/spf13/viper v1.10.1 h1:nuJZuYpG7gTj/XqiUwg8bA0cp1+M2mC3J4g5luUYBKk= +github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp 
v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net 
v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486 h1:5hpz5aRr+W1erYCL5JRhSUBJRph7l9XkNveoExlrKYk= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools 
v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= 
+golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= +google.golang.org/api 
v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
+google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= 
+google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc/cmd/protoc-gen-go-grpc 
v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= 
+honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/go/mcap/main.go b/go/mcap/main.go new file mode 100644 index 0000000000..2022a4e9b5 --- /dev/null +++ b/go/mcap/main.go @@ -0,0 +1,22 @@ +/* +Copyright © 2022 NAME HERE + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +package main + +import "github.com/foxglove/mcap/go/mcap/cmd" + +func main() { + cmd.Execute() +} diff --git a/testdata/bags/demo.bag b/testdata/bags/demo.bag new file mode 100644 index 0000000000..6864414096 --- /dev/null +++ b/testdata/bags/demo.bag @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e6e0049cc2567afe437da3f019debfcf1d96bd33f8c12d3680353a0329b6510 +size 70311473 diff --git a/testdata/mcap/demo.mcap b/testdata/mcap/demo.mcap new file mode 100644 index 0000000000..c95539109c --- /dev/null +++ b/testdata/mcap/demo.mcap @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48010dc923d90714548468b32c1afeb322dd65d42b8e2a443f4b7adc1c0b126a +size 69589630 From 53c1d3c49b87154a4ac5ea7dde54e80664fea371 Mon Sep 17 00:00:00 2001 From: Wyatt Alt Date: Wed, 26 Jan 2022 16:34:59 -0800 Subject: [PATCH 034/635] Small update to golang CLI README (#68) --- go/mcap/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/go/mcap/README.md b/go/mcap/README.md index 18ea4fe6e8..61d5d445ab 100644 --- a/go/mcap/README.md +++ b/go/mcap/README.md @@ -51,6 +51,9 @@ Report summary statistics on an mcap file: (5) /radar/tracks: 156 msgs (6) /velodyne_points: 78 msgs attachments: 0 + +Echo messages to stdout using the end of file index: + [~/work/mcap/go/mcap] (task/mcap-client) $ mcap cat demo.mcap --topics /tf,/diagnostics | head -n 10 1490149580103843113 /diagnostics [42 10 0 0 204 224 209 88 99 250]... 1490149580103843113 /tf [1 0 0 0 0 0 0 0 204 224]... From 10484d818b7738c5b55073cb0cdcef39714e0c41 Mon Sep 17 00:00:00 2001 From: Wyatt Alt Date: Thu, 27 Jan 2022 13:42:24 -0800 Subject: [PATCH 035/635] Remove channel from golang lexer (#72) Changes the lexer to no longer return errors as tokens, and instead return an error value on calls to next. 
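Callers now receive a (Token, error) pair from Next and check for io.EOF, rather than switching on TokenEOF/TokenError token types. A minimal sketch of the new calling pattern, modeled on the tests in this diff (assumes code inside package libmcap, since Token.bytes is unexported; error handling is abbreviated to log.Fatal):

```
lexer, err := NewLexer(r) // magic is now validated up front; may return ErrBadMagic
if err != nil {
	log.Fatal(err)
}
for {
	tok, err := lexer.Next()
	if errors.Is(err, io.EOF) {
		break // clean end of file
	}
	if err != nil {
		log.Fatal(err)
	}
	_ = tok.bytes() // the record body must be consumed before the next call
}
```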
--- go/libmcap/benchmark_test.go | 3 +- go/libmcap/indexed_message_iterator.go | 40 ++-- go/libmcap/lexer.go | 239 ++++++++++------------- go/libmcap/lexer_test.go | 126 ++++++------ go/libmcap/mcap.go | 2 +- go/libmcap/reader.go | 21 +- go/libmcap/reader_test.go | 18 +- go/libmcap/unindexed_message_iterator.go | 14 +- go/libmcap/writer_test.go | 22 ++- go/mcap/cmd/cat.go | 5 +- go/mcap/cmd/info.go | 5 +- 11 files changed, 254 insertions(+), 241 deletions(-) diff --git a/go/libmcap/benchmark_test.go b/go/libmcap/benchmark_test.go index 5f6692a4e6..aadf3310c7 100644 --- a/go/libmcap/benchmark_test.go +++ b/go/libmcap/benchmark_test.go @@ -20,7 +20,8 @@ func BenchmarkMessageIteration(b *testing.B) { mcapfile := &bytes.Buffer{} err = Bag2MCAP(bagfile, mcapfile) assert.Nil(b, err) - r := NewReader(bytes.NewReader(mcapfile.Bytes())) + r, err := NewReader(bytes.NewReader(mcapfile.Bytes())) + assert.Nil(b, err) it, err := r.Messages(0, time.Now().UnixNano(), []string{}, true) assert.Nil(b, err) c := 0 diff --git a/go/libmcap/indexed_message_iterator.go b/go/libmcap/indexed_message_iterator.go index bad3cca231..bb7123f32c 100644 --- a/go/libmcap/indexed_message_iterator.go +++ b/go/libmcap/indexed_message_iterator.go @@ -65,7 +65,6 @@ func (it *indexedMessageIterator) parseIndexSection() error { if err != nil { return err } - it.lexer.SetLexNext() var msg []byte defer func() { it.chunksets = sortOverlappingChunks(it.chunkIndexes) @@ -75,7 +74,10 @@ func (it *indexedMessageIterator) parseIndexSection() error { // populating the index fields. Top: for { - tok := it.lexer.Next() + tok, err := it.lexer.Next() + if err != nil { + return fmt.Errorf("lexer error: %w", err) + } msg = tok.bytes() switch tok.TokenType { case TokenChunkIndex: @@ -108,8 +110,6 @@ Top: if len(it.topics) == 0 || it.topics[channelInfo.TopicName] { it.channels[channelInfo.ChannelID] = channelInfo } - case TokenEOF: - return io.EOF case TokenStatistics: stats := parseStatisticsRecord(msg) it.statistics = stats @@ -171,7 +171,10 @@ func (it *indexedMessageIterator) loadChunk(index int) error { if err != nil { return err } - tok := it.lexer.Next() + tok, err := it.lexer.Next() + if err != nil { + return err + } var chunk *Chunk switch tok.TokenType { case TokenChunk: @@ -210,8 +213,12 @@ func (it *indexedMessageIterator) loadChunk(index int) error { } it.activeChunkIndex = index - it.activeChunkLexer = NewLexer(it.activeChunkReader) - it.activeChunkLexer.SetLexNext() + it.activeChunkLexer, err = NewLexer(it.activeChunkReader, &LexOpts{ + SkipMagic: true, + }) + if err != nil { + return fmt.Errorf("failed to lex chunk: %s", err) + } return nil } @@ -230,7 +237,10 @@ func (it *indexedMessageIterator) loadNextChunkset() error { } // now we're at the message index implicated by the chunk; parse one record var messageIndex *MessageIndex - tok := it.lexer.Next() + tok, err := it.lexer.Next() + if err != nil { + return err + } switch tok.TokenType { case TokenMessageIndex: messageIndex = parseMessageIndex(tok.bytes()) @@ -306,15 +316,17 @@ func (it *indexedMessageIterator) Next() (*ChannelInfo, *Message, error) { if err != nil { return nil, nil, err } - tok := it.activeChunkLexer.Next() + tok, err := it.activeChunkLexer.Next() + if err != nil { + return nil, nil, err + } switch tok.TokenType { case TokenMessage: - msg := parseMessage(tok.bytes()) + msg, err := parseMessage(tok.bytes()) + if err != nil { + return nil, nil, err + } return it.channels[msg.ChannelID], msg, nil - case TokenError: - return nil, nil, fmt.Errorf("error: %s", 
tok.bytes()) - case TokenEOF: // end of chunk - return nil, nil, io.EOF default: _ = tok.bytes() return nil, nil, fmt.Errorf("unexpected token %s in message section", tok) diff --git a/go/libmcap/lexer.go b/go/libmcap/lexer.go index 816e95b62a..af939f5c12 100644 --- a/go/libmcap/lexer.go +++ b/go/libmcap/lexer.go @@ -41,18 +41,12 @@ func (t TokenType) String() string { return "statistics" case TokenMessageIndex: return "message index" - case TokenError: - return "error" - case TokenEOF: - return "eof" } return "unknown" } func (t Token) String() string { switch t.TokenType { - case TokenError: - return fmt.Sprintf("error: %s", string(t.bytes())) default: return t.TokenType.String() } @@ -69,8 +63,6 @@ const ( TokenStatistics TokenChunk TokenMessageIndex - TokenEOF - TokenError ) type Token struct { @@ -85,8 +77,6 @@ func (t Token) bytes() []byte { return data } -type stateFn func(*lexer) stateFn - type decoders struct { lz4 *lz4.Reader zstd *zstd.Decoder @@ -94,64 +84,26 @@ type decoders struct { } type lexer struct { - state stateFn - basereader io.Reader - chunkreader io.Reader - reader io.Reader - chunkReader ResettableWriteCloser - tokens chan Token - emitChunks bool - - compressedChunk []byte - chunk []byte - skipbuf []byte - decoders decoders - inChunk bool - buf []byte - validateCRC bool -} + basereader io.Reader + reader io.Reader + emitChunks bool -func (l *lexer) SetLexNext() { - l.state = lexNext -} - -func (l *lexer) Next() Token { - if l.state == nil { - return Token{TokenEOF, 0, bytes.NewReader(nil)} - } - for { - select { - case token := <-l.tokens: - return token - default: - l.state = l.state(l) - } - } -} - -func (l *lexer) emit(t TokenType, n int64, data io.Reader) { - l.tokens <- Token{t, n, data} -} - -func (l *lexer) error(err error) stateFn { - if errors.Is(err, io.EOF) || errors.Is(err, io.ErrUnexpectedEOF) { - l.emit(TokenEOF, 0, bytes.NewReader(nil)) - } else { - l.emit(TokenError, int64(len(err.Error())), bytes.NewReader([]byte(err.Error()))) - } - return nil + decoders decoders + inChunk bool + buf []byte + validateCRC bool } -func lexMagic(l *lexer) stateFn { +func validateMagic(r io.Reader) error { magic := make([]byte, len(Magic)) - _, err := l.reader.Read(magic) + _, err := io.ReadFull(r, magic) if err != nil { - return l.error(err) + return ErrBadMagic } if !bytes.Equal(magic, Magic) { - return l.error(ErrBadMagic) + return ErrBadMagic } - return lexNext + return nil } func (l *lexer) setNoneDecoder(buf []byte) { @@ -189,37 +141,26 @@ func (l *lexer) setZSTDDecoder(r io.Reader) error { return nil } -func skip(l *lexer, n uint64) stateFn { - if n > uint64(len(l.skipbuf)) { - l.skipbuf = make([]byte, 2*n) - } - _, err := l.reader.Read(l.skipbuf[:n]) - if err != nil { - return l.error(err) - } - return lexNext -} - -func lexChunk(l *lexer, recordSize uint64) stateFn { +func loadChunk(l *lexer, recordSize int64) error { if l.inChunk { - return l.error(ErrNestedChunk) + return ErrNestedChunk } - _, err := l.reader.Read(l.buf[:8+4+4]) + _, err := io.ReadFull(l.reader, l.buf[:8+4+4]) if err != nil { - return l.error(err) + return err } // Skip the uncompressed size; the lexer will read messages out of the // reader incrementally. 
_ = binary.LittleEndian.Uint64(l.buf[:8]) uncompressedCRC := binary.LittleEndian.Uint32(l.buf[8:12]) compressionLen := binary.LittleEndian.Uint32(l.buf[12:16]) - _, err = l.reader.Read(l.buf[:compressionLen]) + _, err = io.ReadFull(l.reader, l.buf[:compressionLen]) if err != nil { - return l.error(err) + return err } compression := l.buf[:compressionLen] // will eof at the end of the chunk - lr := io.LimitReader(l.reader, int64(recordSize-16-uint64(compressionLen))) + lr := io.LimitReader(l.reader, int64(uint64(recordSize)-16-uint64(compressionLen))) switch CompressionFormat(compression) { case CompressionNone: l.reader = lr @@ -228,10 +169,10 @@ func lexChunk(l *lexer, recordSize uint64) stateFn { case CompressionZSTD: err = l.setZSTDDecoder(lr) if err != nil { - return l.error(err) + return err } default: - return l.error(fmt.Errorf("unsupported compression: %s", string(compression))) + return fmt.Errorf("unsupported compression: %s", string(compression)) } // if we are validating the CRC, we need to fully decompress the chunk right @@ -242,85 +183,109 @@ func lexChunk(l *lexer, recordSize uint64) stateFn { if l.validateCRC { uncompressed, err := io.ReadAll(l.reader) if err != nil { - return l.error(err) + return err } crc := crc32.ChecksumIEEE(uncompressed) if crc != uncompressedCRC { - return l.error(fmt.Errorf("invalid CRC: %x != %x", crc, uncompressedCRC)) + return fmt.Errorf("invalid CRC: %x != %x", crc, uncompressedCRC) } l.setNoneDecoder(uncompressed) } l.inChunk = true - return lexNext + return nil } -func lexNext(l *lexer) stateFn { - _, err := io.ReadFull(l.reader, l.buf[:9]) - if err != nil { - if l.inChunk && (errors.Is(err, io.ErrUnexpectedEOF) || errors.Is(err, io.EOF)) { // todo what's going on here - l.inChunk = false - l.reader = l.basereader - return lexNext - } - return l.error(err) - } - opcode := OpCode(l.buf[0]) - recordLen := binary.LittleEndian.Uint64(l.buf[1:9]) - switch opcode { - case OpHeader: - l.emit(TokenHeader, int64(recordLen), l.reader) - case OpChannelInfo: - l.emit(TokenChannelInfo, int64(recordLen), l.reader) - case OpFooter: - l.emit(TokenFooter, int64(recordLen), l.reader) - return lexMagic - case OpMessage: - l.emit(TokenMessage, int64(recordLen), l.reader) - case OpAttachment: - l.emit(TokenAttachment, int64(recordLen), l.reader) - case OpAttachmentIndex: - l.emit(TokenAttachmentIndex, int64(recordLen), l.reader) - case OpChunkIndex: - if !l.emitChunks { - return skip(l, recordLen) - } - l.emit(TokenChunkIndex, int64(recordLen), l.reader) - case OpStatistics: - l.emit(TokenStatistics, int64(recordLen), l.reader) - case OpMessageIndex: - if !l.emitChunks { - return skip(l, recordLen) +func (l *lexer) Next() (Token, error) { + for { + _, err := io.ReadFull(l.reader, l.buf[:9]) + if err != nil { + unexpectedEOF := errors.Is(err, io.ErrUnexpectedEOF) + eof := errors.Is(err, io.EOF) + if l.inChunk && eof { + l.inChunk = false + l.reader = l.basereader + continue + } + if unexpectedEOF || eof { + return Token{}, io.EOF + } + return Token{}, err } - l.emit(TokenMessageIndex, int64(recordLen), l.reader) - case OpChunk: - if !l.emitChunks { - return lexChunk(l, recordLen) + opcode := OpCode(l.buf[0]) + recordLen := int64(binary.LittleEndian.Uint64(l.buf[1:9])) + switch opcode { + case OpHeader: + return Token{TokenHeader, recordLen, l.reader}, nil + case OpChannelInfo: + return Token{TokenChannelInfo, recordLen, l.reader}, nil + case OpFooter: + return Token{TokenFooter, recordLen, l.reader}, nil + case OpMessage: + return Token{TokenMessage, recordLen, 
l.reader}, nil + case OpAttachment: + return Token{TokenAttachment, recordLen, l.reader}, nil + case OpAttachmentIndex: + return Token{TokenAttachmentIndex, recordLen, l.reader}, nil + case OpChunkIndex: + if !l.emitChunks { + _, err := io.CopyN(io.Discard, l.reader, recordLen) + if err != nil { + return Token{}, err + } + continue + } + return Token{TokenChunkIndex, recordLen, l.reader}, nil + case OpStatistics: + return Token{TokenStatistics, recordLen, l.reader}, nil + case OpMessageIndex: + if !l.emitChunks { + _, err := io.CopyN(io.Discard, l.reader, recordLen) + if err != nil { + return Token{}, err + } + continue + } + return Token{TokenMessageIndex, recordLen, l.reader}, nil + case OpChunk: + if !l.emitChunks { + err := loadChunk(l, recordLen) + if err != nil { + return Token{}, err + } + continue + } + return Token{TokenChunk, recordLen, l.reader}, nil + default: + continue // skip unrecognized opcodes } - l.emit(TokenChunk, int64(recordLen), l.reader) - default: - return skip(l, recordLen) } - return lexNext } -type lexOpts struct { - validateCRC bool - emitChunks bool +type LexOpts struct { + SkipMagic bool + ValidateCRC bool + EmitChunks bool } -func NewLexer(r io.Reader, opts ...*lexOpts) *lexer { - var validateCRC, emitChunks bool +func NewLexer(r io.Reader, opts ...*LexOpts) (*lexer, error) { + var validateCRC, emitChunks, skipMagic bool if len(opts) > 0 { - validateCRC = opts[0].validateCRC - emitChunks = opts[0].emitChunks + validateCRC = opts[0].ValidateCRC + emitChunks = opts[0].EmitChunks + skipMagic = opts[0].SkipMagic + } + + if !skipMagic { + err := validateMagic(r) + if err != nil { + return nil, err + } } return &lexer{ basereader: r, reader: r, - tokens: make(chan Token, 1), // why buf: make([]byte, 32), - state: lexMagic, validateCRC: validateCRC, emitChunks: emitChunks, - } + }, nil } diff --git a/go/libmcap/lexer_test.go b/go/libmcap/lexer_test.go index fd681714f1..7dc43a08b8 100644 --- a/go/libmcap/lexer_test.go +++ b/go/libmcap/lexer_test.go @@ -2,7 +2,9 @@ package libmcap import ( "bytes" + "errors" "fmt" + "io" "os" "testing" "time" @@ -20,7 +22,8 @@ func TestLexUnchunkedFile(t *testing.T) { attachment(), footer(), ) - lexer := NewLexer(bytes.NewReader(file)) + lexer, err := NewLexer(bytes.NewReader(file)) + assert.Nil(t, err) expected := []TokenType{ TokenHeader, TokenChannelInfo, @@ -31,7 +34,9 @@ func TestLexUnchunkedFile(t *testing.T) { TokenFooter, } for _, tt := range expected { - tk := lexer.Next() + tk, err := lexer.Next() + assert.Nil(t, err) + tk.bytes() assert.Equal(t, tt, tk.TokenType) } } @@ -40,10 +45,10 @@ func TestRejectsUnsupportedCompression(t *testing.T) { file := file( chunk(t, CompressionFormat("unknown"), chunk(t, CompressionLZ4, channelInfo(), message(), message())), ) - lexer := NewLexer(bytes.NewReader(file)) - token := lexer.Next() - assert.Equal(t, TokenError, token.TokenType) - assert.Equal(t, "unsupported compression: unknown", string(token.bytes())) + lexer, err := NewLexer(bytes.NewReader(file)) + assert.Nil(t, err) + _, err = lexer.Next() + assert.Equal(t, "unsupported compression: unknown", err.Error()) } func TestRejectsNestedChunks(t *testing.T) { @@ -52,17 +57,14 @@ func TestRejectsNestedChunks(t *testing.T) { chunk(t, CompressionLZ4, chunk(t, CompressionLZ4, channelInfo(), message(), message())), footer(), ) - lexer := NewLexer(bytes.NewReader(file)) - expected := []TokenType{ - TokenHeader, - TokenError, - } - var tk Token - for _, tt := range expected { - tk = lexer.Next() - assert.Equal(t, tt, tk.TokenType) - } - 
assert.Equal(t, ErrNestedChunk.Error(), string(tk.bytes())) + lexer, err := NewLexer(bytes.NewReader(file)) + assert.Nil(t, err) + // header, then error + tk, err := lexer.Next() + assert.Nil(t, err) + assert.Equal(t, tk.TokenType, TokenHeader) + tk, err = lexer.Next() + assert.ErrorIs(t, ErrNestedChunk, err) } func TestBadMagic(t *testing.T) { @@ -81,27 +83,24 @@ func TestBadMagic(t *testing.T) { } for _, c := range cases { t.Run(c.assertion, func(t *testing.T) { - lexer := NewLexer(bytes.NewReader(c.magic)) - tk := lexer.Next() - assert.Equal(t, TokenError, tk.TokenType) - assert.Equal(t, ErrBadMagic.Error(), string(tk.bytes())) + _, err := NewLexer(bytes.NewReader(c.magic)) + assert.ErrorIs(t, err, ErrBadMagic) }) } } func TestShortMagicResultsCorrectError(t *testing.T) { - lexer := NewLexer(bytes.NewReader(make([]byte, 4))) - tk := lexer.Next() - assert.Equal(t, TokenError, tk.TokenType) - assert.Equal(t, ErrBadMagic.Error(), string(tk.bytes())) + _, err := NewLexer(bytes.NewReader(make([]byte, 4))) + assert.ErrorIs(t, err, ErrBadMagic) } func TestReturnsEOFOnSuccessiveCalls(t *testing.T) { - lexer := NewLexer(bytes.NewReader(file())) - tk := lexer.Next() - assert.Equal(t, TokenEOF, tk.TokenType) - tk = lexer.Next() - assert.Equal(t, TokenEOF, tk.TokenType) + lexer, err := NewLexer(bytes.NewReader(file())) + assert.Nil(t, err) + _, err = lexer.Next() + assert.ErrorIs(t, err, io.EOF) + _, err = lexer.Next() + assert.ErrorIs(t, err, io.EOF) } func TestLexChunkedFile(t *testing.T) { @@ -120,9 +119,10 @@ func TestLexChunkedFile(t *testing.T) { attachment(), attachment(), footer(), ) - lexer := NewLexer(bytes.NewReader(file), &lexOpts{ - validateCRC: validateCRC, + lexer, err := NewLexer(bytes.NewReader(file), &LexOpts{ + ValidateCRC: validateCRC, }) + assert.Nil(t, err) expected := []TokenType{ TokenHeader, TokenChannelInfo, @@ -134,12 +134,18 @@ func TestLexChunkedFile(t *testing.T) { TokenAttachment, TokenAttachment, TokenFooter, - TokenEOF, } for i, tt := range expected { - tk := lexer.Next() + tk, err := lexer.Next() + tk.bytes() + assert.Nil(t, err) assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) } + + // now we are eof + tk, err := lexer.Next() + tk.bytes() + assert.ErrorIs(t, io.EOF, err) }) } }) @@ -154,10 +160,11 @@ func TestSkipsUnknownOpcodes(t *testing.T) { unrecognized, message(), ) - lexer := NewLexer(bytes.NewReader(file)) + lexer, err := NewLexer(bytes.NewReader(file)) + assert.Nil(t, err) expected := []TokenType{TokenHeader, TokenMessage} for i, tt := range expected { - tk := lexer.Next() + tk, _ := lexer.Next() _ = tk.bytes() assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) } @@ -172,9 +179,10 @@ func TestChunkCRCValidation(t *testing.T) { attachment(), attachment(), footer(), ) - lexer := NewLexer(bytes.NewReader(file), &lexOpts{ - validateCRC: true, + lexer, err := NewLexer(bytes.NewReader(file), &LexOpts{ + ValidateCRC: true, }) + assert.Nil(t, err) expected := []TokenType{ TokenHeader, TokenChannelInfo, @@ -186,10 +194,10 @@ func TestChunkCRCValidation(t *testing.T) { TokenAttachment, TokenAttachment, TokenFooter, - TokenEOF, } for i, tt := range expected { - tk := lexer.Next() + tk, err := lexer.Next() + assert.Nil(t, err) _ = tk.bytes() // always must consume the reader assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) } @@ -204,24 +212,24 @@ func TestChunkCRCValidation(t *testing.T) { attachment(), attachment(), footer(), ) - lexer := NewLexer(bytes.NewReader(file), &lexOpts{ - validateCRC: 
true, + lexer, err := NewLexer(bytes.NewReader(file), &LexOpts{ + ValidateCRC: true, }) + assert.Nil(t, err) expected := []TokenType{ TokenHeader, TokenChannelInfo, TokenMessage, TokenMessage, - TokenError, } for i, tt := range expected { - tk := lexer.Next() - data := tk.bytes() // always must consume the reader - if tt == TokenError { - assert.Equal(t, "invalid CRC: ffaaf97a != aaf97a", string(data)) - } + tk, err := lexer.Next() + assert.Nil(t, err) + _ = tk.bytes() // always must consume the reader assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) } + _, err = lexer.Next() + assert.Equal(t, "invalid CRC: ffaaf97a != aaf97a", err.Error()) }) } @@ -244,10 +252,11 @@ func TestChunkEmission(t *testing.T) { attachment(), attachment(), footer(), ) - lexer := NewLexer(bytes.NewReader(file), &lexOpts{ - validateCRC: validateCRC, - emitChunks: true, + lexer, err := NewLexer(bytes.NewReader(file), &LexOpts{ + ValidateCRC: validateCRC, + EmitChunks: true, }) + assert.Nil(t, err) expected := []TokenType{ TokenHeader, TokenChunk, @@ -255,13 +264,15 @@ func TestChunkEmission(t *testing.T) { TokenAttachment, TokenAttachment, TokenFooter, - TokenEOF, } for i, tt := range expected { - tk := lexer.Next() + tk, err := lexer.Next() + assert.Nil(t, err) _ = tk.bytes() // always must consume the reader assert.Equal(t, tt, tk.TokenType, fmt.Sprintf("mismatch element %d", i)) } + _, err = lexer.Next() + assert.ErrorIs(t, err, io.EOF) }) } }) @@ -291,20 +302,19 @@ func BenchmarkLexer(b *testing.B) { assert.Nil(b, err) reader := &bytes.Reader{} b.ResetTimer() - msg := make([]byte, 1024*1024) b.Run(c.assertion, func(b *testing.B) { for n := 0; n < b.N; n++ { t0 := time.Now() var tokens, bytecount int64 reader.Reset(input) - lexer := NewLexer(reader) + lexer, err := NewLexer(reader) + assert.Nil(b, err) for { - tok := lexer.Next() - if tok.TokenType == TokenEOF { + tok, err := lexer.Next() + if errors.Is(err, io.EOF) { break } - if int64(len(msg)) < tok.ByteCount { msg = make([]byte, tok.ByteCount) } diff --git a/go/libmcap/mcap.go b/go/libmcap/mcap.go index a96ab73ec7..7f6b7008b3 100644 --- a/go/libmcap/mcap.go +++ b/go/libmcap/mcap.go @@ -302,7 +302,7 @@ func (i Info) String() string { compressionRatio := 100 * (1 - float64(v.compressedSize)/float64(v.uncompressedSize)) fmt.Fprintf(buf, "\t%s: [%d/%d chunks] (%.2f%%) \n", k, v.count, chunkCount, compressionRatio) } - fmt.Fprintf(buf, "channels\n") + fmt.Fprintf(buf, "channels:\n") chanIDs := []uint16{} for chanID := range i.Channels { diff --git a/go/libmcap/reader.go b/go/libmcap/reader.go index 47e5eae987..b8f53261c8 100644 --- a/go/libmcap/reader.go +++ b/go/libmcap/reader.go @@ -90,7 +90,10 @@ func parseAttachmentIndex(buf []byte) (*AttachmentIndex, error) { }, nil } -func parseMessage(buf []byte) *Message { +func parseMessage(buf []byte) (*Message, error) { + if len(buf) < 2+4+8+8 { + return nil, io.ErrShortBuffer + } channelID, offset := getUint16(buf, 0) sequence, offset := getUint32(buf, offset) publishTime, offset := getUint64(buf, offset) @@ -102,7 +105,7 @@ func parseMessage(buf []byte) *Message { RecordTime: recordTime, PublishTime: publishTime, Data: data, - } + }, nil } func parseChunkIndex(buf []byte) (*ChunkIndex, error) { @@ -294,17 +297,21 @@ func (r *Reader) Info() (*Info, error) { }, nil } -func NewReader(r io.Reader) *Reader { +func NewReader(r io.Reader) (*Reader, error) { var rs io.ReadSeeker if readseeker, ok := r.(io.ReadSeeker); ok { rs = readseeker } + lexer, err := NewLexer(r, &LexOpts{ + EmitChunks: true, + }) 
+ if err != nil { + return nil, err + } return &Reader{ - l: NewLexer(r, &lexOpts{ - emitChunks: true, - }), + l: lexer, r: r, rs: rs, channels: make(map[uint16]*ChannelInfo), - } + }, nil } diff --git a/go/libmcap/reader_test.go b/go/libmcap/reader_test.go index f39f758fee..496ce436c9 100644 --- a/go/libmcap/reader_test.go +++ b/go/libmcap/reader_test.go @@ -141,7 +141,8 @@ func TestMessageReading(t *testing.T) { w.Close() t.Run("read all messages", func(t *testing.T) { reader := bytes.NewReader(buf.Bytes()) - r := NewReader(reader) + r, err := NewReader(reader) + assert.Nil(t, err) it, err := r.Messages(0, 10000, []string{}, useIndex) assert.Nil(t, err) c := 0 @@ -160,7 +161,8 @@ func TestMessageReading(t *testing.T) { }) t.Run("read messages on one topic", func(t *testing.T) { reader := bytes.NewReader(buf.Bytes()) - r := NewReader(reader) + r, err := NewReader(reader) + assert.Nil(t, err) it, err := r.Messages(0, 10000, []string{"/test1"}, useIndex) assert.Nil(t, err) c := 0 @@ -179,7 +181,8 @@ func TestMessageReading(t *testing.T) { }) t.Run("read messages on multiple topics", func(t *testing.T) { reader := bytes.NewReader(buf.Bytes()) - r := NewReader(reader) + r, err := NewReader(reader) + assert.Nil(t, err) it, err := r.Messages(0, 10000, []string{"/test1", "/test2"}, useIndex) assert.Nil(t, err) c := 0 @@ -198,7 +201,8 @@ func TestMessageReading(t *testing.T) { }) t.Run("read messages in time range", func(t *testing.T) { reader := bytes.NewReader(buf.Bytes()) - r := NewReader(reader) + r, err := NewReader(reader) + assert.Nil(t, err) it, err := r.Messages(100, 200, []string{}, useIndex) assert.Nil(t, err) c := 0 @@ -227,7 +231,8 @@ func TestReaderCounting(t *testing.T) { mcapfile := &bytes.Buffer{} err = Bag2MCAP(bagfile, mcapfile) assert.Nil(t, err) - r := NewReader(bytes.NewReader(mcapfile.Bytes())) + r, err := NewReader(bytes.NewReader(mcapfile.Bytes())) + assert.Nil(t, err) it, err := r.Messages(0, time.Now().UnixNano(), []string{}, indexed) assert.Nil(t, err) c := 0 @@ -251,7 +256,8 @@ func TestMCAPInfo(t *testing.T) { mcapfile := &bytes.Buffer{} err = Bag2MCAP(bagfile, mcapfile) assert.Nil(t, err) - r := NewReader(bytes.NewReader(mcapfile.Bytes())) + r, err := NewReader(bytes.NewReader(mcapfile.Bytes())) + assert.Nil(t, err) info, err := r.Info() assert.Nil(t, err) assert.Equal(t, uint64(1606), info.Statistics.MessageCount) diff --git a/go/libmcap/unindexed_message_iterator.go b/go/libmcap/unindexed_message_iterator.go index faef46bb46..7c6fd13e85 100644 --- a/go/libmcap/unindexed_message_iterator.go +++ b/go/libmcap/unindexed_message_iterator.go @@ -15,12 +15,11 @@ type unindexedMessageIterator struct { func (it *unindexedMessageIterator) Next() (*ChannelInfo, *Message, error) { for { - token := it.lexer.Next() + token, err := it.lexer.Next() + if err != nil { + return nil, nil, err + } switch token.TokenType { - case TokenError: - return nil, nil, fmt.Errorf("%s", token.bytes()) - case TokenEOF: - return nil, nil, io.EOF case TokenChannelInfo: channelInfo, err := parseChannelInfo(token.bytes()) if err != nil { @@ -32,7 +31,10 @@ func (it *unindexedMessageIterator) Next() (*ChannelInfo, *Message, error) { } } case TokenMessage: - message := parseMessage(token.bytes()) + message, err := parseMessage(token.bytes()) + if err != nil { + return nil, nil, err + } if _, ok := it.channels[message.ChannelID]; !ok { // skip messages on channels we don't know about. 
Note that if // an unindexed reader encounters a message it would be diff --git a/go/libmcap/writer_test.go b/go/libmcap/writer_test.go index 325abd1000..ed5929ec6b 100644 --- a/go/libmcap/writer_test.go +++ b/go/libmcap/writer_test.go @@ -15,8 +15,10 @@ func TestMCAPReadWrite(t *testing.T) { assert.Nil(t, err) err = w.WriteHeader("ros1", "", map[string]string{"foo": "bar"}) assert.Nil(t, err) - lexer := NewLexer(buf) - token := lexer.Next() + lexer, err := NewLexer(buf) + assert.Nil(t, err) + token, err := lexer.Next() + assert.Nil(t, err) // body of the header is the profile, followed by the metadata map offset := 0 data := token.bytes() @@ -75,7 +77,8 @@ func TestChunkedReadWrite(t *testing.T) { }) assert.Nil(t, w.Close()) assert.Nil(t, err) - lexer := NewLexer(buf) + lexer, err := NewLexer(buf) + assert.Nil(t, err) for i, expected := range []TokenType{ TokenHeader, TokenChannelInfo, @@ -83,9 +86,9 @@ func TestChunkedReadWrite(t *testing.T) { TokenChannelInfo, TokenStatistics, TokenFooter, - TokenEOF, } { - tok := lexer.Next() + tok, err := lexer.Next() + assert.Nil(t, err) _ = tok.bytes() // need to read the data assert.Equal(t, expected, tok.TokenType, fmt.Sprintf("want %s got %s at %d", Token{expected, 0, nil}, tok.TokenType, i)) } @@ -131,9 +134,10 @@ func TestUnchunkedReadWrite(t *testing.T) { Data: []byte{0x01, 0x02, 0x03, 0x04}, }) assert.Nil(t, err) - w.Close() + assert.Nil(t, w.Close()) - lexer := NewLexer(buf) + lexer, err := NewLexer(buf) + assert.Nil(t, err) for _, expected := range []TokenType{ TokenHeader, TokenChannelInfo, @@ -143,9 +147,9 @@ func TestUnchunkedReadWrite(t *testing.T) { TokenAttachmentIndex, TokenStatistics, TokenFooter, - TokenEOF, } { - tok := lexer.Next() + tok, err := lexer.Next() + assert.Nil(t, err) _ = tok.bytes() assert.Equal(t, expected, tok.TokenType, fmt.Sprintf("want %s got %s", Token{expected, 0, nil}, tok)) } diff --git a/go/mcap/cmd/cat.go b/go/mcap/cmd/cat.go index 547fb85594..b96765ae18 100644 --- a/go/mcap/cmd/cat.go +++ b/go/mcap/cmd/cat.go @@ -26,7 +26,10 @@ var catCmd = &cobra.Command{ if err != nil { log.Fatal(err) } - reader := libmcap.NewReader(f) + reader, err := libmcap.NewReader(f) + if err != nil { + log.Fatal(err) + } it, err := reader.Messages(start, end, topics, true) if err != nil { log.Fatal(err) diff --git a/go/mcap/cmd/info.go b/go/mcap/cmd/info.go index cda29a6b95..3d1b2f5028 100644 --- a/go/mcap/cmd/info.go +++ b/go/mcap/cmd/info.go @@ -20,7 +20,10 @@ var infoCmd = &cobra.Command{ if err != nil { log.Fatal(err) } - reader := libmcap.NewReader(r) + reader, err := libmcap.NewReader(r) + if err != nil { + log.Fatal(err) + } info, err := reader.Info() if err != nil { log.Fatal(err) From aabbce38847dad392f735c6949d0128f935a95bf Mon Sep 17 00:00:00 2001 From: Roman Shtylman Date: Thu, 27 Jan 2022 14:22:01 -0800 Subject: [PATCH 036/635] Update spec (#69) Updates from team discussion. 
--- docs/specification/README.md | 469 ++++++++++++++---- docs/specification/profiles/README.md | 15 +- docs/specification/profiles/ros1.md | 18 +- docs/specification/profiles/ros2.md | 18 +- docs/specification/well-known-encodings.md | 6 + .../well-known-schema-formats.md | 28 ++ 6 files changed, 431 insertions(+), 123 deletions(-) create mode 100644 docs/specification/well-known-encodings.md create mode 100644 docs/specification/well-known-schema-formats.md diff --git a/docs/specification/README.md b/docs/specification/README.md index 5fd4c3abe2..e4a1d98d48 100644 --- a/docs/specification/README.md +++ b/docs/specification/README.md @@ -1,128 +1,153 @@ # MCAP File Format Specification -[tlv wiki]: https://en.wikipedia.org/wiki/Type-length-value -[profiles]: ./profiles -[compression formats]: ./compression/supported-compression-formats.md -[explanatory notes]: ./notes/explanatory-notes.md -[diagram unchunked]: ./diagrams/unchunked.png -[diagram chunked]: ./diagrams/chunked.png -[feature explanations]: ./notes/explanatory-notes.md#feature-explanations - > Status: DRAFT ## Overview -MCAP is a container file format for append-only storage of heterogeneously-schematized data. It is inspired by the ROS1 bag format and is intended to support flexible serialization options, while also generalizing to non-ROS systems and retaining characteristics such as self-containment and chunk compression. Features include: +MCAP is a modular container file format for recording timestamped [pub/sub](https://en.wikipedia.org/wiki/Publish–subscribe_pattern) messages with arbitrary serialization formats. + +MCAP files are designed to work well under various workloads, resource constraints, and durability requirements. -- Single-pass, indexed writes (no backward seeking) -- Flexible message serialization options (e.g. ros1, protobuf, …) -- Self-contained (message schemas are included in the file) -- Fast remote file summarization -- File attachments -- Optional chunk compression -- Optional CRC integrity checks +- [Structure](#file-structure) + - [Header](#header) + - [Footer](#footer) + - [Data Section](#data-section) + - [Summary Section](#summary-section) +- [Records](#records) + - [Header](#header-op0x01) + - [Footer](#footer-op0x02) + - [Channel Info](#channel-info-op0x03) + - [Message](#message-op0x04) + - [Chunk](#chunk-op0x05) + - [Message Index](#message-index-op0x06) + - [Chunk Index](#chunk-index-op0x07) + - [Attachment](#attachment-op0x08) + - [Attachment Index](#attachment-index-op0x09) + - [Statistics](#statistics-op0x0A) + - [Metadata](#metadata-op0x0B) + - [Metadata Index](#metadata-op0x0C) + - [Summary Offset](#summary-offset-op0x0D) +- [Serialization](#serialization) -### Glossary +## File Structure -Some helpful terms to understand in the following sections are: +A valid MCAP file is structured as follows. The Summary and Summary Offset sections are optional. -- **Record**: A [TLV triplet][tlv wiki] with type and value corresponding to one of the opcodes and schemas below. -- **Topic**: A named message type and associated schema. -- **Channel**: A logical stream that contains messages on a single topic. Channels are associated with a numeric ID by the recorder - the **Channel ID**. -- **Channel Info**: A type of record describing information about a channel, notably containing the name and schema of the topic. -- **Message**: A type of record representing a timestamped message on a channel (and therefore associated with a topic/schema). 
A message can be parsed by a reader that has also read the channel info for the channel on which the message appears. -- **Chunk**: A record type that wraps a compressed set of channel info and message records. -- **Attachment**: Extra data that may be included in the file, outside the chunks. Attachments may be quickly listed and accessed via an index at the end of the file. -- **Index**: The format contains indexes for both messages and attachments. For messages, there are two levels of indexing - a **Chunk Index** at the end of the file points to chunks by offset, enabling fast location of chunks based on channel and timerange. A second index - the **Message Index** - after each chunk contains, for each channel in the chunk, an offset and timestamp for every message to allow fast location of messages within the uncompressed chunk data. The attachment index at the end of the file allows for fast listing and location of attachments based on name, timestamp, or attachment type. -- **Statistics**: A type of record at the end of the file, used to support fast summarization of file contents. -- **Message Data Section**: Used in this doc to refer to the first portion of the file that contains chunks and message data. To be distinguished from the **Index Data Section**. -- **Index Data Section**: The last part of the file, containing records used for searching and summarizing the file. The Index Data section is split into a **channel info portion**, **chunk index portion**, and **attachment index portion**, each containing contiguous runs of the corresponding record type, followed by a **Statistics** record. All portions of the index data section are optional, subject to constraints and tradeoffs described below. There are no other record types in the index data section.
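For a concrete picture of how the indexes described here are consumed, a minimal sketch using the Go reader from earlier in this patch series (the file handle f and topic "/test1" are placeholders; passing useIndex=true selects the indexed message iterator):

```
r, err := libmcap.NewReader(f) // f should be an io.ReadSeeker for indexed reading
if err != nil {
	log.Fatal(err)
}
it, err := r.Messages(0, time.Now().UnixNano(), []string{"/test1"}, true)
if err != nil {
	log.Fatal(err)
}
for {
	ci, msg, err := it.Next()
	if errors.Is(err, io.EOF) {
		break // no more messages in range
	}
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(ci.TopicName, msg.RecordTime, len(msg.Data))
}
```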