From 11393774c1c716768aa17ae0fe32e296bc0d0d86 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 5 Aug 2022 10:40:00 +0100 Subject: [PATCH 01/14] fix: increase encoding performance Attempts to optimise encoding behaviour --- .gitignore | 1 + packages/protons-benchmark/package.json | 6 +- packages/protons-benchmark/src/encode.ts | 46 +++++++ packages/protons-benchmark/src/index.ts | 42 +++--- packages/protons-runtime/package.json | 4 +- packages/protons-runtime/src/codec.ts | 2 +- packages/protons-runtime/src/codecs/bool.ts | 7 +- packages/protons-runtime/src/codecs/bytes.ts | 18 ++- packages/protons-runtime/src/codecs/double.ts | 14 +- packages/protons-runtime/src/codecs/enum.ts | 9 +- .../protons-runtime/src/codecs/fixed32.ts | 14 +- .../protons-runtime/src/codecs/fixed64.ts | 14 +- packages/protons-runtime/src/codecs/float.ts | 14 +- packages/protons-runtime/src/codecs/int32.ts | 10 +- packages/protons-runtime/src/codecs/int64.ts | 10 +- .../protons-runtime/src/codecs/message.ts | 25 ++-- .../protons-runtime/src/codecs/sfixed32.ts | 14 +- .../protons-runtime/src/codecs/sfixed64.ts | 14 +- packages/protons-runtime/src/codecs/sint32.ts | 9 +- packages/protons-runtime/src/codecs/sint64.ts | 9 +- packages/protons-runtime/src/codecs/string.ts | 27 ++-- packages/protons-runtime/src/codecs/uint32.ts | 11 +- packages/protons-runtime/src/codecs/uint64.ts | 9 +- packages/protons-runtime/src/decode.ts | 2 +- packages/protons-runtime/src/encode.ts | 17 ++- packages/protons-runtime/src/utils/alloc.ts | 12 -- packages/protons-runtime/src/utils/utf8.ts | 123 ++++++++++++++++++ packages/protons/package.json | 2 +- 28 files changed, 382 insertions(+), 103 deletions(-) create mode 100644 packages/protons-benchmark/src/encode.ts delete mode 100644 packages/protons-runtime/src/utils/alloc.ts create mode 100644 packages/protons-runtime/src/utils/utf8.ts diff --git a/.gitignore b/.gitignore index 6134bbc..3d255fa 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,4 @@ node_modules package-lock.json yarn.lock .clinic +coverage diff --git a/packages/protons-benchmark/package.json b/packages/protons-benchmark/package.json index b0e27d7..b3d3ac8 100644 --- a/packages/protons-benchmark/package.json +++ b/packages/protons-benchmark/package.json @@ -55,7 +55,8 @@ "sourceType": "module" }, "ignorePatterns": [ - "src/protobufjs/*.ts" + "src/pbjs/*", + "src/protobufjs/*" ] }, "scripts": { @@ -67,8 +68,9 @@ "start": "node dist/src/index.js" }, "dependencies": { + "@types/benchmark": "^2.1.1", "aegir": "^37.0.5", - "benny": "^3.7.1", + "benchmark": "^2.1.4", "pbjs": "^0.0.14", "protobufjs": "^6.11.2", "protons": "^4.0.0", diff --git a/packages/protons-benchmark/src/encode.ts b/packages/protons-benchmark/src/encode.ts new file mode 100644 index 0000000..aff560b --- /dev/null +++ b/packages/protons-benchmark/src/encode.ts @@ -0,0 +1,46 @@ +/* eslint-disable no-console */ + +/* +$ node dist/src/index.js +$ npx playwright-test dist/src/index.js --runner benchmark +*/ + +import Benchmark from 'benchmark' +import { Test as ProtonsTest } from './protons/bench.js' +import { encodeTest as pbjsEncodeTest } from './pbjs/bench.js' +import { Test as ProtobufjsTest } from './protobufjs/bench.js' + +const message = { + meh: { + lol: 'sdkljfoee', + b: { + tmp: { + baz: 2309292 + } + } + }, + hello: 3493822, + foo: 'derp derp derp', + payload: Uint8Array.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) +} + +new Benchmark.Suite() + .add('pbjs', () => { + pbjsEncodeTest(message) + }) + .add('protons', () => { + ProtonsTest.encode(message) + }) 
+ .add('protobufjs', () => { + ProtobufjsTest.encode(message).finish() + }) +// add listeners + .on('cycle', (event: any) => { + console.log(String(event.target)) + }) + .on('complete', function () { + // @ts-expect-error types are wrong + console.log('Fastest is ' + this.filter('fastest').map('name')) + }) + // run async + .run({ async: true }) diff --git a/packages/protons-benchmark/src/index.ts b/packages/protons-benchmark/src/index.ts index 88fe0c3..49fc81c 100644 --- a/packages/protons-benchmark/src/index.ts +++ b/packages/protons-benchmark/src/index.ts @@ -1,5 +1,11 @@ +/* eslint-disable no-console */ -import benny from 'benny' +/* +$ node dist/src/index.js +$ npx playwright-test dist/src/index.js --runner benchmark +*/ + +import Benchmark from 'benchmark' import { expect } from 'aegir/chai' import { Test as ProtonsTest } from './protons/bench.js' import { encodeTest as pbjsEncodeTest, decodeTest as pbjsDecodeTest } from './pbjs/bench.js' @@ -27,30 +33,34 @@ function expectDecodedCorrectly (result: any) { expect(result).to.have.property('payload').that.equalBytes(message.payload) } -void benny.suite( - 'Encode/Decode', - - benny.add('pbjs', () => { +new Benchmark.Suite() + .add('pbjs', () => { const buf = pbjsEncodeTest(message) const result = pbjsDecodeTest(buf) expectDecodedCorrectly(result) - }), - - benny.add('protons', () => { + }) + .add('protons', () => { const buf = ProtonsTest.encode(message) const result = ProtonsTest.decode(buf) expectDecodedCorrectly(result) - }), - - benny.add('protobufjs', () => { + }) + .add('protobufjs', () => { const buf = ProtobufjsTest.encode(message).finish() const result = ProtobufjsTest.decode(buf) expectDecodedCorrectly(result) - }), - - benny.cycle(), - benny.complete() -) + }) + .on('error', (err: Error) => { + console.error(err) + }) + .on('cycle', (event: any) => { + console.info(String(event.target)) + }) + .on('complete', function () { + // @ts-expect-error types are wrong + console.info(`Fastest is ${this.filter('fastest').map('name')}`) // eslint-disable-line @typescript-eslint/restrict-template-expressions + }) + // run async + .run({ async: true }) diff --git a/packages/protons-runtime/package.json b/packages/protons-runtime/package.json index 6eddb0a..dae8399 100644 --- a/packages/protons-runtime/package.json +++ b/packages/protons-runtime/package.json @@ -151,8 +151,8 @@ "byte-access": "^1.0.1", "longbits": "^1.1.0", "uint8-varint": "^1.0.2", - "uint8arraylist": "^2.0.0", - "uint8arrays": "^3.0.0" + "uint8arraylist": "^2.3.1", + "uint8arrays": "^3.1.0" }, "devDependencies": { "aegir": "^37.0.5" diff --git a/packages/protons-runtime/src/codec.ts b/packages/protons-runtime/src/codec.ts index ab9a050..e92dbba 100644 --- a/packages/protons-runtime/src/codec.ts +++ b/packages/protons-runtime/src/codec.ts @@ -11,7 +11,7 @@ export enum CODEC_TYPES { } export interface EncodeFunction { - (value: T): Uint8Array | Uint8ArrayList + (value: T): { bufs: Uint8Array[], length: number } } export interface DecodeFunction { diff --git a/packages/protons-runtime/src/codecs/bool.ts b/packages/protons-runtime/src/codecs/bool.ts index ec9932b..9073ee1 100644 --- a/packages/protons-runtime/src/codecs/bool.ts +++ b/packages/protons-runtime/src/codecs/bool.ts @@ -6,7 +6,12 @@ const encodingLength: EncodingLengthFunction = function boolEncodingLen } const encode: EncodeFunction = function boolEncode (value) { - return Uint8Array.from([value ? 1 : 0]) + return { + bufs: [ + Uint8Array.from([value ? 
1 : 0]) + ], + length: 1 + } } const decode: DecodeFunction = function boolDecode (buffer, offset) { diff --git a/packages/protons-runtime/src/codecs/bytes.ts b/packages/protons-runtime/src/codecs/bytes.ts index e288b14..9b7f5e6 100644 --- a/packages/protons-runtime/src/codecs/bytes.ts +++ b/packages/protons-runtime/src/codecs/bytes.ts @@ -1,8 +1,8 @@ -import { Uint8ArrayList } from 'uint8arraylist' import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { allocUnsafe } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function bytesEncodingLength (val) { const len = val.byteLength @@ -10,10 +10,18 @@ const encodingLength: EncodingLengthFunction = function bytesEncodin } const encode: EncodeFunction = function bytesEncode (val) { - return new Uint8ArrayList( - unsigned.encode(val.byteLength), - val - ) + const lenLen = unsigned.encodingLength(val.byteLength) + const buf = allocUnsafe(lenLen + val.byteLength) + unsigned.encode(val.byteLength, buf) + + buf.set(val, lenLen) + + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function bytesDecode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/double.ts b/packages/protons-runtime/src/codecs/double.ts index 9aabc63..354075c 100644 --- a/packages/protons-runtime/src/codecs/double.ts +++ b/packages/protons-runtime/src/codecs/double.ts @@ -1,16 +1,22 @@ -import { Uint8ArrayList } from 'uint8arraylist' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function doubleEncodingLength () { return 8 } const encode: EncodeFunction = function doubleEncode (val) { - const buf = new Uint8ArrayList(new Uint8Array(encodingLength(val))) - buf.setFloat64(0, val, true) + const buf = alloc(encodingLength(val)) + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + view.setFloat64(0, val, true) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function doubleDecode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/enum.ts b/packages/protons-runtime/src/codecs/enum.ts index f5648d5..79c738b 100644 --- a/packages/protons-runtime/src/codecs/enum.ts +++ b/packages/protons-runtime/src/codecs/enum.ts @@ -2,7 +2,7 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction, Codec } from '../codec.js' -import { allocUnsafe } from '../utils/alloc.js' +import { allocUnsafe } from 'uint8arrays/alloc' export function enumeration (v: any): Codec { function findValue (val: string | number): number { @@ -27,7 +27,12 @@ export function enumeration (v: any): Codec { const buf = allocUnsafe(unsigned.encodingLength(enumValue)) unsigned.encode(enumValue, buf) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function enumDecode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/fixed32.ts b/packages/protons-runtime/src/codecs/fixed32.ts index 727e547..f4ff908 100644 --- a/packages/protons-runtime/src/codecs/fixed32.ts +++ b/packages/protons-runtime/src/codecs/fixed32.ts @@ -1,16 +1,22 @@ -import { Uint8ArrayList } from 
'uint8arraylist' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function fixed32EncodingLength () { return 4 } const encode: EncodeFunction = function fixed32Encode (val) { - const buf = new Uint8ArrayList(new Uint8Array(encodingLength(val))) - buf.setInt32(0, val, true) + const buf = alloc(encodingLength(val)) + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + view.setInt32(0, val, true) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function fixed32Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/fixed64.ts b/packages/protons-runtime/src/codecs/fixed64.ts index cc3bc78..a576fc6 100644 --- a/packages/protons-runtime/src/codecs/fixed64.ts +++ b/packages/protons-runtime/src/codecs/fixed64.ts @@ -1,16 +1,22 @@ -import { Uint8ArrayList } from 'uint8arraylist' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function int64EncodingLength (val) { return 8 } const encode: EncodeFunction = function int64Encode (val) { - const buf = new Uint8ArrayList(new Uint8Array(encodingLength(val))) - buf.setBigInt64(0, val, true) + const buf = alloc(encodingLength(val)) + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + view.setBigInt64(0, val, true) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function int64Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/float.ts b/packages/protons-runtime/src/codecs/float.ts index 7ccdda2..1542235 100644 --- a/packages/protons-runtime/src/codecs/float.ts +++ b/packages/protons-runtime/src/codecs/float.ts @@ -1,16 +1,22 @@ -import { Uint8ArrayList } from 'uint8arraylist' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function floatEncodingLength () { return 4 } const encode: EncodeFunction = function floatEncode (val) { - const buf = new Uint8ArrayList(new Uint8Array(encodingLength(1))) - buf.setFloat32(0, val, true) + const buf = alloc(encodingLength(val)) + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + view.setFloat32(0, val, true) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function floatDecode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/int32.ts b/packages/protons-runtime/src/codecs/int32.ts index 9755e63..078096b 100644 --- a/packages/protons-runtime/src/codecs/int32.ts +++ b/packages/protons-runtime/src/codecs/int32.ts @@ -1,6 +1,7 @@ import { signed } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function int32EncodingLength (val) { if (val < 0) { @@ -11,9 +12,14 @@ const encodingLength: EncodingLengthFunction = function int32EncodingLen } const encode: EncodeFunction = function int32Encode 
(val) { - const buf = new Uint8Array(encodingLength(val)) + const buf = signed.encode(val, alloc(encodingLength(val))) - return signed.encode(val, buf) + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function int32Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/int64.ts b/packages/protons-runtime/src/codecs/int64.ts index 801bc4b..bedcecf 100644 --- a/packages/protons-runtime/src/codecs/int64.ts +++ b/packages/protons-runtime/src/codecs/int64.ts @@ -1,6 +1,7 @@ import { signed } from 'uint8-varint/big' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function int64EncodingLength (val) { if (val < 0n) { @@ -11,9 +12,14 @@ const encodingLength: EncodingLengthFunction = function int64EncodingLen } const encode: EncodeFunction = function int64Encode (val) { - const buf = new Uint8Array(encodingLength(val)) + const buf = signed.encode(val, alloc(encodingLength(val))) - return signed.encode(val, buf) + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function int64Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/message.ts b/packages/protons-runtime/src/codecs/message.ts index c259bef..a343cce 100644 --- a/packages/protons-runtime/src/codecs/message.ts +++ b/packages/protons-runtime/src/codecs/message.ts @@ -1,9 +1,7 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction, Codec } from '../codec.js' -import { Uint8ArrayList } from 'uint8arraylist' import type { FieldDefs, FieldDef } from '../index.js' -import { allocUnsafe } from '../utils/alloc.js' export interface Factory { new (obj: A): T @@ -21,7 +19,10 @@ export function message (fieldDefs: FieldDefs): Codec { } const encode: EncodeFunction> = function messageEncode (val) { - const bytes = new Uint8ArrayList() + const bufs: Uint8Array[] = [ + new Uint8Array(0) // will hold length prefix + ] + let length = 0 function encodeValue (value: any, fieldNumber: number, fieldDef: FieldDef) { if (value == null) { @@ -33,12 +34,12 @@ export function message (fieldDefs: FieldDefs): Codec { } const key = (fieldNumber << 3) | fieldDef.codec.type - const prefix = allocUnsafe(unsigned.encodingLength(key)) - unsigned.encode(key, prefix) + const prefix = unsigned.encode(key) const encoded = fieldDef.codec.encode(value) - bytes.append(prefix) - bytes.append(encoded) + bufs.push(prefix, ...encoded.bufs) + length += encoded.length + length += prefix.byteLength } for (const [fieldNumberStr, fieldDef] of Object.entries(fieldDefs)) { @@ -57,9 +58,15 @@ export function message (fieldDefs: FieldDefs): Codec { } } - const prefix = unsigned.encode(bytes.length) + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength - return new Uint8ArrayList(prefix, bytes) + return { + bufs, + length + } } const decode: DecodeFunction = function messageDecode (buffer, offset) { diff --git a/packages/protons-runtime/src/codecs/sfixed32.ts b/packages/protons-runtime/src/codecs/sfixed32.ts index 3958974..2ec6eae 100644 --- a/packages/protons-runtime/src/codecs/sfixed32.ts +++ b/packages/protons-runtime/src/codecs/sfixed32.ts @@ -1,16 +1,22 @@ -import { Uint8ArrayList } from 'uint8arraylist' import { createCodec, CODEC_TYPES } 
from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function sfixed32EncodingLength () { return 4 } const encode: EncodeFunction = function sfixed32Encode (val) { - const buf = new Uint8ArrayList(new Uint8Array(encodingLength(val))) - buf.setInt32(0, val, true) + const buf = alloc(encodingLength(val)) + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + view.setInt32(0, val, true) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function sfixed32Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/sfixed64.ts b/packages/protons-runtime/src/codecs/sfixed64.ts index 9e9a0ff..978932d 100644 --- a/packages/protons-runtime/src/codecs/sfixed64.ts +++ b/packages/protons-runtime/src/codecs/sfixed64.ts @@ -1,16 +1,22 @@ -import { Uint8ArrayList } from 'uint8arraylist' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import { alloc } from 'uint8arrays/alloc' const encodingLength: EncodingLengthFunction = function sfixed64EncodingLength () { return 8 } const encode: EncodeFunction = function sfixed64Encode (val) { - const buf = new Uint8ArrayList(new Uint8Array(encodingLength(val))) - buf.setBigInt64(0, val, true) + const buf = alloc(encodingLength(val)) + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + view.setBigInt64(0, val, true) - return buf + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function sfixed64Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/sint32.ts b/packages/protons-runtime/src/codecs/sint32.ts index 469b9fe..c36cd42 100644 --- a/packages/protons-runtime/src/codecs/sint32.ts +++ b/packages/protons-runtime/src/codecs/sint32.ts @@ -7,7 +7,14 @@ const encodingLength: EncodingLengthFunction = function sint32EncodingLe } const encode: EncodeFunction = function svarintEncode (val) { - return zigzag.encode(val) + const buf = zigzag.encode(val) + + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function svarintDecode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/sint64.ts b/packages/protons-runtime/src/codecs/sint64.ts index bd1d3f4..553d915 100644 --- a/packages/protons-runtime/src/codecs/sint64.ts +++ b/packages/protons-runtime/src/codecs/sint64.ts @@ -7,7 +7,14 @@ const encodingLength: EncodingLengthFunction = function int64EncodingLen } const encode: EncodeFunction = function int64Encode (val) { - return zigzag.encode(val) + const buf = zigzag.encode(val) + + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function int64Decode (buf, offset) { diff --git a/packages/protons-runtime/src/codecs/string.ts b/packages/protons-runtime/src/codecs/string.ts index 58a317f..b206b59 100644 --- a/packages/protons-runtime/src/codecs/string.ts +++ b/packages/protons-runtime/src/codecs/string.ts @@ -1,29 +1,36 @@ import { unsigned } from 'uint8-varint' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import { createCodec, CODEC_TYPES } from '../codec.js' +import { allocUnsafe } from 'uint8arrays/alloc' +import * as utf8 from '../utils/utf8.js' import type { 
DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' -import { Uint8ArrayList } from 'uint8arraylist' const encodingLength: EncodingLengthFunction = function stringEncodingLength (val) { - const len = uint8ArrayFromString(val).byteLength + const len = utf8.length(val) return unsigned.encodingLength(len) + len } const encode: EncodeFunction = function stringEncode (val) { - const asBuf = uint8ArrayFromString(val) + const strLen = utf8.length(val) + const lenLen = unsigned.encodingLength(strLen) + const buf = allocUnsafe(lenLen + strLen) + unsigned.encode(strLen, buf) - return new Uint8ArrayList( - unsigned.encode(asBuf.byteLength), - asBuf - ) + utf8.write(val, buf, lenLen) + + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function stringDecode (buf, offset) { const strLen = unsigned.decode(buf, offset) offset += unsigned.encodingLength(strLen) + const b = buf.subarray(offset, offset + strLen) - return uint8ArrayToString(buf.subarray(offset, offset + strLen)) + return utf8.read(b, 0, b.byteLength) } export const string = createCodec('string', CODEC_TYPES.LENGTH_DELIMITED, encode, decode, encodingLength) diff --git a/packages/protons-runtime/src/codecs/uint32.ts b/packages/protons-runtime/src/codecs/uint32.ts index 9c894ca..5b45217 100644 --- a/packages/protons-runtime/src/codecs/uint32.ts +++ b/packages/protons-runtime/src/codecs/uint32.ts @@ -7,15 +7,18 @@ const encodingLength: EncodingLengthFunction = function uint32EncodingLe } const encode: EncodeFunction = function uint32Encode (val) { - // val = val < 0 ? val + 4294967296 : val + const buf = unsigned.encode(val) - return unsigned.encode(val) + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function uint32Decode (buf, offset) { return unsigned.decode(buf, offset) - - // return value > 2147483647 ? 
value - 4294967296 : value } export const uint32 = createCodec('uint32', CODEC_TYPES.VARINT, encode, decode, encodingLength) diff --git a/packages/protons-runtime/src/codecs/uint64.ts b/packages/protons-runtime/src/codecs/uint64.ts index efad6ad..b61d2b6 100644 --- a/packages/protons-runtime/src/codecs/uint64.ts +++ b/packages/protons-runtime/src/codecs/uint64.ts @@ -7,7 +7,14 @@ const encodingLength: EncodingLengthFunction = function uint64EncodingLe } const encode: EncodeFunction = function uint64Encode (val) { - return unsigned.encode(val) + const buf = unsigned.encode(val) + + return { + bufs: [ + buf + ], + length: buf.byteLength + } } const decode: DecodeFunction = function uint64Decode (buf, offset) { diff --git a/packages/protons-runtime/src/decode.ts b/packages/protons-runtime/src/decode.ts index 6216804..6384bc7 100644 --- a/packages/protons-runtime/src/decode.ts +++ b/packages/protons-runtime/src/decode.ts @@ -1,7 +1,7 @@ import { Uint8ArrayList } from 'uint8arraylist' import { unsigned } from 'uint8-varint' import type { Codec } from './codec.js' -import { allocUnsafe } from './utils/alloc.js' +import { allocUnsafe } from 'uint8arrays/alloc' export function decodeMessage (buf: Uint8Array | Uint8ArrayList, codec: Codec): T { // wrap root message diff --git a/packages/protons-runtime/src/encode.ts b/packages/protons-runtime/src/encode.ts index ff343e1..fb65c44 100644 --- a/packages/protons-runtime/src/encode.ts +++ b/packages/protons-runtime/src/encode.ts @@ -1,15 +1,20 @@ import { Uint8ArrayList } from 'uint8arraylist' import type { Codec } from './codec.js' -import { unsigned } from 'uint8-varint' export function encodeMessage (message: T, codec: Codec): Uint8ArrayList { - // unwrap root message const encoded = codec.encode(message) - const skip = unsigned.encodingLength(unsigned.decode(encoded)) + const list = Uint8ArrayList.fromUint8Arrays(encoded.bufs, encoded.length) - if (encoded instanceof Uint8Array) { - return new Uint8ArrayList(encoded.subarray(skip)) + // unwrap root message - it is prefixed by a varint so skip those bytes + let skip = 0 + for (let i = 0; i < list.byteLength; i++) { + skip = i + 1 + + // when the MSB is not 1, there are no more bytes in this varint + if ((list.get(i) & 0x80) !== 0x80) { + break + } } - return encoded.sublist(skip) + return list.sublist(skip) } diff --git a/packages/protons-runtime/src/utils/alloc.ts b/packages/protons-runtime/src/utils/alloc.ts deleted file mode 100644 index e870fd8..0000000 --- a/packages/protons-runtime/src/utils/alloc.ts +++ /dev/null @@ -1,12 +0,0 @@ - -export function alloc (len: number) { - return new Uint8Array(len) -} - -export function allocUnsafe (len: number) { - if (globalThis?.Buffer?.allocUnsafe != null) { - return globalThis.Buffer.allocUnsafe(len) - } - - return new Uint8Array(len) -} diff --git a/packages/protons-runtime/src/utils/utf8.ts b/packages/protons-runtime/src/utils/utf8.ts new file mode 100644 index 0000000..1efa206 --- /dev/null +++ b/packages/protons-runtime/src/utils/utf8.ts @@ -0,0 +1,123 @@ +/** + * A minimal UTF8 implementation for number arrays. + * + * @memberof util + * @namespace + */ + +/** + * Calculates the UTF8 byte length of a string. 
+ * + * @param {string} string - String + * @returns {number} Byte length + */ +export function length (string: string) { + let len = 0 + let c = 0 + for (let i = 0; i < string.length; ++i) { + c = string.charCodeAt(i) + + if (c < 128) { + len += 1 + } else if (c < 2048) { + len += 2 + } else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) { + ++i + len += 4 + } else { + len += 3 + } + } + + return len +} + +/** + * Reads UTF8 bytes as a string. + * + * @param {Uint8Array} buffer - Source buffer + * @param {number} start - Source start + * @param {number} end - Source end + * @returns {string} String read + */ +export function read (buffer: Uint8Array, start: number, end: number) { + const len = end - start + + if (len < 1) { + return '' + } + + let parts: string[] | undefined + const chunk: number[] = [] + let i = 0 // char offset + let t: number // temporary + + while (start < end) { + t = buffer[start++] + + if (t < 128) { + chunk[i++] = t + } else if (t > 191 && t < 224) { + chunk[i++] = (t & 31) << 6 | buffer[start++] & 63 + } else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000 + chunk[i++] = 0xD800 + (t >> 10) + chunk[i++] = 0xDC00 + (t & 1023) + } else { + chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63 + } + + if (i > 8191) { + (parts ?? (parts = [])).push(String.fromCharCode.apply(String, chunk)) + i = 0 + } + } + + if (parts != null) { + if (i > 0) { + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))) + } + + return parts.join('') + } + + return String.fromCharCode.apply(String, chunk.slice(0, i)) +} + +/** + * Writes a string as UTF8 bytes. + * + * @param {string} string - Source string + * @param {Uint8Array} buffer - Destination buffer + * @param {number} offset - Destination offset + * @returns {number} Bytes written + */ +export function write (string: string, buffer: Uint8Array, offset: number) { + const start = offset + let c1 // character 1 + let c2 // character 2 + + for (let i = 0; i < string.length; ++i) { + c1 = string.charCodeAt(i) + + if (c1 < 128) { + buffer[offset++] = c1 + } else if (c1 < 2048) { + buffer[offset++] = c1 >> 6 | 192 + buffer[offset++] = c1 & 63 | 128 + } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) { + c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF) + ++i + buffer[offset++] = c1 >> 18 | 240 + buffer[offset++] = c1 >> 12 & 63 | 128 + buffer[offset++] = c1 >> 6 & 63 | 128 + buffer[offset++] = c1 & 63 | 128 + } else { + buffer[offset++] = c1 >> 12 | 224 + buffer[offset++] = c1 >> 6 & 63 | 128 + buffer[offset++] = c1 & 63 | 128 + } + } + + return offset - start +} diff --git a/packages/protons/package.json b/packages/protons/package.json index dc56e43..e1d3b4b 100644 --- a/packages/protons/package.json +++ b/packages/protons/package.json @@ -160,6 +160,6 @@ "aegir": "^37.0.5", "pbjs": "^0.0.14", "protons-runtime": "^2.0.0", - "uint8arraylist": "^2.0.0" + "uint8arraylist": "^2.3.1" } } From aabf1edd3b4dca6ef2885b572a56b27cd9330b85 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 5 Aug 2022 10:47:33 +0100 Subject: [PATCH 02/14] chore: remove file --- packages/protons-benchmark/src/encode.ts | 46 ------------------------ 1 file changed, 46 deletions(-) delete mode 100644 packages/protons-benchmark/src/encode.ts diff --git a/packages/protons-benchmark/src/encode.ts b/packages/protons-benchmark/src/encode.ts deleted file mode 
100644 index aff560b..0000000 --- a/packages/protons-benchmark/src/encode.ts +++ /dev/null @@ -1,46 +0,0 @@ -/* eslint-disable no-console */ - -/* -$ node dist/src/index.js -$ npx playwright-test dist/src/index.js --runner benchmark -*/ - -import Benchmark from 'benchmark' -import { Test as ProtonsTest } from './protons/bench.js' -import { encodeTest as pbjsEncodeTest } from './pbjs/bench.js' -import { Test as ProtobufjsTest } from './protobufjs/bench.js' - -const message = { - meh: { - lol: 'sdkljfoee', - b: { - tmp: { - baz: 2309292 - } - } - }, - hello: 3493822, - foo: 'derp derp derp', - payload: Uint8Array.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) -} - -new Benchmark.Suite() - .add('pbjs', () => { - pbjsEncodeTest(message) - }) - .add('protons', () => { - ProtonsTest.encode(message) - }) - .add('protobufjs', () => { - ProtobufjsTest.encode(message).finish() - }) -// add listeners - .on('cycle', (event: any) => { - console.log(String(event.target)) - }) - .on('complete', function () { - // @ts-expect-error types are wrong - console.log('Fastest is ' + this.filter('fastest').map('name')) - }) - // run async - .run({ async: true }) From 79aec2e4a6eaf7f2f7f3228a64d58508a9d9d206 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sun, 7 Aug 2022 12:00:13 +0100 Subject: [PATCH 03/14] fix: use list of properties and not map --- .../protons-benchmark/src/protons/bench.ts | 38 ++-- .../protons-runtime/src/codecs/message.ts | 28 ++- packages/protons-runtime/src/index.ts | 3 +- packages/protons/src/index.ts | 6 +- packages/protons/test/fixtures/basic.ts | 8 +- packages/protons/test/fixtures/circuit.ts | 20 +- packages/protons/test/fixtures/daemon.ts | 200 +++++++++--------- packages/protons/test/fixtures/dht.ts | 40 ++-- packages/protons/test/fixtures/noise.ts | 10 +- packages/protons/test/fixtures/peer.ts | 30 +-- packages/protons/test/fixtures/test.ts | 46 ++-- 11 files changed, 219 insertions(+), 210 deletions(-) diff --git a/packages/protons-benchmark/src/protons/bench.ts b/packages/protons-benchmark/src/protons/bench.ts index 328e1a5..b0055e4 100644 --- a/packages/protons-benchmark/src/protons/bench.ts +++ b/packages/protons-benchmark/src/protons/bench.ts @@ -14,9 +14,9 @@ export namespace Foo { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'baz', codec: uint32 } - }) + _codec = message([ + { id: 1, name: 'baz', codec: uint32 } + ]) } return _codec @@ -40,9 +40,9 @@ export namespace Bar { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'tmp', codec: Foo.codec() } - }) + _codec = message([ + { id: 1, name: 'tmp', codec: Foo.codec() } + ]) } return _codec @@ -81,9 +81,9 @@ export namespace Yo { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'lol', codec: FOO.codec(), repeats: true } - }) + _codec = message([ + { id: 1, name: 'lol', codec: FOO.codec(), repeats: true } + ]) } return _codec @@ -108,10 +108,10 @@ export namespace Lol { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'lol', codec: string }, - 2: { name: 'b', codec: Bar.codec() } - }) + _codec = message([ + { id: 1, name: 'lol', codec: string }, + { id: 2, name: 'b', codec: Bar.codec() } + ]) } return _codec @@ -138,12 +138,12 @@ export namespace Test { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 6: { name: 'meh', codec: Lol.codec() }, - 3: { name: 'hello', codec: uint32 }, - 1: { name: 'foo', codec: string }, - 7: { name: 
'payload', codec: bytes } - }) + _codec = message([ + { id: 6, name: 'meh', codec: Lol.codec() }, + { id: 3, name: 'hello', codec: uint32 }, + { id: 1, name: 'foo', codec: string }, + { id: 7, name: 'payload', codec: bytes } + ]) } return _codec diff --git a/packages/protons-runtime/src/codecs/message.ts b/packages/protons-runtime/src/codecs/message.ts index a343cce..630bf4f 100644 --- a/packages/protons-runtime/src/codecs/message.ts +++ b/packages/protons-runtime/src/codecs/message.ts @@ -1,17 +1,25 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, EncodingLengthFunction, Codec } from '../codec.js' -import type { FieldDefs, FieldDef } from '../index.js' +import type { FieldDef } from '../index.js' export interface Factory { new (obj: A): T } -export function message (fieldDefs: FieldDefs): Codec { +export function message (fieldDefs: FieldDef[]): Codec { + // create a id => FieldDef mapping for quick access + const fieldDefLookup: Record = {} + for (const def of fieldDefs) { + fieldDefLookup[def.id] = def + } + const encodingLength: EncodingLengthFunction = function messageEncodingLength (val: Record) { let length = 0 - for (const fieldDef of Object.values(fieldDefs)) { + for (let i = 0; i < fieldDefs.length; i++) { + const fieldDef = fieldDefs[i] + length += fieldDef.codec.encodingLength(val[fieldDef.name]) } @@ -42,8 +50,8 @@ export function message (fieldDefs: FieldDefs): Codec { length += prefix.byteLength } - for (const [fieldNumberStr, fieldDef] of Object.entries(fieldDefs)) { - const fieldNumber = parseInt(fieldNumberStr) + for (let i = 0; i < fieldDefs.length; i++) { + const fieldDef = fieldDefs[i] if (fieldDef.repeats === true) { if (!Array.isArray(val[fieldDef.name])) { @@ -51,10 +59,10 @@ export function message (fieldDefs: FieldDefs): Codec { } for (const value of val[fieldDef.name]) { - encodeValue(value, fieldNumber, fieldDef) + encodeValue(value, fieldDef.id, fieldDef) } } else { - encodeValue(val[fieldDef.name], fieldNumber, fieldDef) + encodeValue(val[fieldDef.name], fieldDef.id, fieldDef) } } @@ -81,7 +89,7 @@ export function message (fieldDefs: FieldDefs): Codec { const wireType = key & 0x7 const fieldNumber = key >> 3 - const fieldDef = fieldDefs[fieldNumber] + const fieldDef = fieldDefLookup[fieldNumber] let fieldLength = 0 if (wireType === CODEC_TYPES.VARINT) { @@ -124,7 +132,9 @@ export function message (fieldDefs: FieldDefs): Codec { } // make sure repeated fields have an array if not set - for (const fieldDef of Object.values(fieldDefs)) { + for (let i = 0; i < fieldDefs.length; i++) { + const fieldDef = fieldDefs[i] + if (fieldDef.repeats === true && fields[fieldDef.name] == null) { fields[fieldDef.name] = [] } diff --git a/packages/protons-runtime/src/index.ts b/packages/protons-runtime/src/index.ts index 791a4e1..b3b83d5 100644 --- a/packages/protons-runtime/src/index.ts +++ b/packages/protons-runtime/src/index.ts @@ -1,6 +1,7 @@ import type { Codec } from './codec.js' export interface FieldDef { + id: number, name: string codec: Codec optional?: true @@ -8,8 +9,6 @@ export interface FieldDef { packed?: true } -export type FieldDefs = Record - export { decodeMessage } from './decode.js' diff --git a/packages/protons/src/index.ts b/packages/protons/src/index.ts index 0c1ad0d..52749ef 100644 --- a/packages/protons/src/index.ts +++ b/packages/protons/src/index.ts @@ -194,7 +194,7 @@ export interface ${messageDef.name} { export const codec = (): Codec<${messageDef.name}> 
=> { if (_codec == null) { - _codec = message<${messageDef.name}>({ + _codec = message<${messageDef.name}>([ ${Object.entries(fields) .map(([name, fieldDef]) => { let codec = encoders[fieldDef.type] @@ -214,9 +214,9 @@ export interface ${messageDef.name} { moduleDef.imports.add(codec) } - return `${fieldDef.id}: { name: '${name}', codec: ${codec}${fieldDef.options?.proto3_optional === true ? ', optional: true' : ''}${fieldDef.rule === 'repeated' ? ', repeats: true' : ''} }` + return `{ id: ${fieldDef.id}, name: '${name}', codec: ${codec}${fieldDef.options?.proto3_optional === true ? ', optional: true' : ''}${fieldDef.rule === 'repeated' ? ', repeats: true' : ''} }` }).join(',\n ')} - }) + ]) } return _codec diff --git a/packages/protons/test/fixtures/basic.ts b/packages/protons/test/fixtures/basic.ts index 16fcd57..bc26c7b 100644 --- a/packages/protons/test/fixtures/basic.ts +++ b/packages/protons/test/fixtures/basic.ts @@ -15,10 +15,10 @@ export namespace Basic { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'foo', codec: string, optional: true }, - 2: { name: 'num', codec: int32 } - }) + _codec = message([ + { id: 1, name: 'foo', codec: string, optional: true }, + { id: 2, name: 'num', codec: int32 } + ]) } return _codec diff --git a/packages/protons/test/fixtures/circuit.ts b/packages/protons/test/fixtures/circuit.ts index e258e02..2582bac 100644 --- a/packages/protons/test/fixtures/circuit.ts +++ b/packages/protons/test/fixtures/circuit.ts @@ -87,10 +87,10 @@ export namespace CircuitRelay { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'id', codec: bytes }, - 2: { name: 'addrs', codec: bytes, repeats: true } - }) + _codec = message([ + { id: 1, name: 'id', codec: bytes }, + { id: 2, name: 'addrs', codec: bytes, repeats: true } + ]) } return _codec @@ -109,12 +109,12 @@ export namespace CircuitRelay { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: CircuitRelay.Type.codec(), optional: true }, - 2: { name: 'srcPeer', codec: CircuitRelay.Peer.codec(), optional: true }, - 3: { name: 'dstPeer', codec: CircuitRelay.Peer.codec(), optional: true }, - 4: { name: 'code', codec: CircuitRelay.Status.codec(), optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: CircuitRelay.Type.codec(), optional: true }, + { id: 2, name: 'srcPeer', codec: CircuitRelay.Peer.codec(), optional: true }, + { id: 3, name: 'dstPeer', codec: CircuitRelay.Peer.codec(), optional: true }, + { id: 4, name: 'code', codec: CircuitRelay.Status.codec(), optional: true } + ]) } return _codec diff --git a/packages/protons/test/fixtures/daemon.ts b/packages/protons/test/fixtures/daemon.ts index 09c9553..8c4736c 100644 --- a/packages/protons/test/fixtures/daemon.ts +++ b/packages/protons/test/fixtures/daemon.ts @@ -54,17 +54,17 @@ export namespace Request { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: Request.Type.codec() }, - 2: { name: 'connect', codec: ConnectRequest.codec(), optional: true }, - 3: { name: 'streamOpen', codec: StreamOpenRequest.codec(), optional: true }, - 4: { name: 'streamHandler', codec: StreamHandlerRequest.codec(), optional: true }, - 5: { name: 'dht', codec: DHTRequest.codec(), optional: true }, - 6: { name: 'connManager', codec: ConnManagerRequest.codec(), optional: true }, - 7: { name: 'disconnect', codec: DisconnectRequest.codec(), optional: true }, - 8: { name: 'pubsub', 
codec: PSRequest.codec(), optional: true }, - 9: { name: 'peerStore', codec: PeerstoreRequest.codec(), optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: Request.Type.codec() }, + { id: 2, name: 'connect', codec: ConnectRequest.codec(), optional: true }, + { id: 3, name: 'streamOpen', codec: StreamOpenRequest.codec(), optional: true }, + { id: 4, name: 'streamHandler', codec: StreamHandlerRequest.codec(), optional: true }, + { id: 5, name: 'dht', codec: DHTRequest.codec(), optional: true }, + { id: 6, name: 'connManager', codec: ConnManagerRequest.codec(), optional: true }, + { id: 7, name: 'disconnect', codec: DisconnectRequest.codec(), optional: true }, + { id: 8, name: 'pubsub', codec: PSRequest.codec(), optional: true }, + { id: 9, name: 'peerStore', codec: PeerstoreRequest.codec(), optional: true } + ]) } return _codec @@ -111,16 +111,16 @@ export namespace Response { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: Response.Type.codec() }, - 2: { name: 'error', codec: ErrorResponse.codec(), optional: true }, - 3: { name: 'streamInfo', codec: StreamInfo.codec(), optional: true }, - 4: { name: 'identify', codec: IdentifyResponse.codec(), optional: true }, - 5: { name: 'dht', codec: DHTResponse.codec(), optional: true }, - 6: { name: 'peers', codec: PeerInfo.codec(), repeats: true }, - 7: { name: 'pubsub', codec: PSResponse.codec(), optional: true }, - 8: { name: 'peerStore', codec: PeerstoreResponse.codec(), optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: Response.Type.codec() }, + { id: 2, name: 'error', codec: ErrorResponse.codec(), optional: true }, + { id: 3, name: 'streamInfo', codec: StreamInfo.codec(), optional: true }, + { id: 4, name: 'identify', codec: IdentifyResponse.codec(), optional: true }, + { id: 5, name: 'dht', codec: DHTResponse.codec(), optional: true }, + { id: 6, name: 'peers', codec: PeerInfo.codec(), repeats: true }, + { id: 7, name: 'pubsub', codec: PSResponse.codec(), optional: true }, + { id: 8, name: 'peerStore', codec: PeerstoreResponse.codec(), optional: true } + ]) } return _codec @@ -145,10 +145,10 @@ export namespace IdentifyResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'id', codec: bytes }, - 2: { name: 'addrs', codec: bytes, repeats: true } - }) + _codec = message([ + { id: 1, name: 'id', codec: bytes }, + { id: 2, name: 'addrs', codec: bytes, repeats: true } + ]) } return _codec @@ -174,11 +174,11 @@ export namespace ConnectRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'peer', codec: bytes }, - 2: { name: 'addrs', codec: bytes, repeats: true }, - 3: { name: 'timeout', codec: int64, optional: true } - }) + _codec = message([ + { id: 1, name: 'peer', codec: bytes }, + { id: 2, name: 'addrs', codec: bytes, repeats: true }, + { id: 3, name: 'timeout', codec: int64, optional: true } + ]) } return _codec @@ -204,11 +204,11 @@ export namespace StreamOpenRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'peer', codec: bytes }, - 2: { name: 'proto', codec: string, repeats: true }, - 3: { name: 'timeout', codec: int64, optional: true } - }) + _codec = message([ + { id: 1, name: 'peer', codec: bytes }, + { id: 2, name: 'proto', codec: string, repeats: true }, + { id: 3, name: 'timeout', codec: int64, optional: true } + ]) } return _codec @@ -233,10 +233,10 @@ export namespace 
StreamHandlerRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'addr', codec: bytes }, - 2: { name: 'proto', codec: string, repeats: true } - }) + _codec = message([ + { id: 1, name: 'addr', codec: bytes }, + { id: 2, name: 'proto', codec: string, repeats: true } + ]) } return _codec @@ -260,9 +260,9 @@ export namespace ErrorResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'msg', codec: string } - }) + _codec = message([ + { id: 1, name: 'msg', codec: string } + ]) } return _codec @@ -288,11 +288,11 @@ export namespace StreamInfo { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'peer', codec: bytes }, - 2: { name: 'addr', codec: bytes }, - 3: { name: 'proto', codec: string } - }) + _codec = message([ + { id: 1, name: 'peer', codec: bytes }, + { id: 2, name: 'addr', codec: bytes }, + { id: 3, name: 'proto', codec: string } + ]) } return _codec @@ -352,15 +352,15 @@ export namespace DHTRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: DHTRequest.Type.codec() }, - 2: { name: 'peer', codec: bytes, optional: true }, - 3: { name: 'cid', codec: bytes, optional: true }, - 4: { name: 'key', codec: bytes, optional: true }, - 5: { name: 'value', codec: bytes, optional: true }, - 6: { name: 'count', codec: int32, optional: true }, - 7: { name: 'timeout', codec: int64, optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: DHTRequest.Type.codec() }, + { id: 2, name: 'peer', codec: bytes, optional: true }, + { id: 3, name: 'cid', codec: bytes, optional: true }, + { id: 4, name: 'key', codec: bytes, optional: true }, + { id: 5, name: 'value', codec: bytes, optional: true }, + { id: 6, name: 'count', codec: int32, optional: true }, + { id: 7, name: 'timeout', codec: int64, optional: true } + ]) } return _codec @@ -404,11 +404,11 @@ export namespace DHTResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: DHTResponse.Type.codec() }, - 2: { name: 'peer', codec: PeerInfo.codec(), optional: true }, - 3: { name: 'value', codec: bytes, optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: DHTResponse.Type.codec() }, + { id: 2, name: 'peer', codec: PeerInfo.codec(), optional: true }, + { id: 3, name: 'value', codec: bytes, optional: true } + ]) } return _codec @@ -433,10 +433,10 @@ export namespace PeerInfo { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'id', codec: bytes }, - 2: { name: 'addrs', codec: bytes, repeats: true } - }) + _codec = message([ + { id: 1, name: 'id', codec: bytes }, + { id: 2, name: 'addrs', codec: bytes, repeats: true } + ]) } return _codec @@ -481,12 +481,12 @@ export namespace ConnManagerRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: ConnManagerRequest.Type.codec() }, - 2: { name: 'peer', codec: bytes, optional: true }, - 3: { name: 'tag', codec: string, optional: true }, - 4: { name: 'weight', codec: int64, optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: ConnManagerRequest.Type.codec() }, + { id: 2, name: 'peer', codec: bytes, optional: true }, + { id: 3, name: 'tag', codec: string, optional: true }, + { id: 4, name: 'weight', codec: int64, optional: true } + ]) } return _codec @@ -510,9 +510,9 @@ export namespace 
DisconnectRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'peer', codec: bytes } - }) + _codec = message([ + { id: 1, name: 'peer', codec: bytes } + ]) } return _codec @@ -558,11 +558,11 @@ export namespace PSRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: PSRequest.Type.codec() }, - 2: { name: 'topic', codec: string, optional: true }, - 3: { name: 'data', codec: bytes, optional: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: PSRequest.Type.codec() }, + { id: 2, name: 'topic', codec: string, optional: true }, + { id: 3, name: 'data', codec: bytes, optional: true } + ]) } return _codec @@ -591,14 +591,14 @@ export namespace PSMessage { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'from', codec: bytes, optional: true }, - 2: { name: 'data', codec: bytes, optional: true }, - 3: { name: 'seqno', codec: bytes, optional: true }, - 4: { name: 'topicIDs', codec: string, repeats: true }, - 5: { name: 'signature', codec: bytes, optional: true }, - 6: { name: 'key', codec: bytes, optional: true } - }) + _codec = message([ + { id: 1, name: 'from', codec: bytes, optional: true }, + { id: 2, name: 'data', codec: bytes, optional: true }, + { id: 3, name: 'seqno', codec: bytes, optional: true }, + { id: 4, name: 'topicIDs', codec: string, repeats: true }, + { id: 5, name: 'signature', codec: bytes, optional: true }, + { id: 6, name: 'key', codec: bytes, optional: true } + ]) } return _codec @@ -623,10 +623,10 @@ export namespace PSResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'topics', codec: string, repeats: true }, - 2: { name: 'peerIDs', codec: bytes, repeats: true } - }) + _codec = message([ + { id: 1, name: 'topics', codec: string, repeats: true }, + { id: 2, name: 'peerIDs', codec: bytes, repeats: true } + ]) } return _codec @@ -668,11 +668,11 @@ export namespace PeerstoreRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: PeerstoreRequest.Type.codec() }, - 2: { name: 'id', codec: bytes, optional: true }, - 3: { name: 'protos', codec: string, repeats: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: PeerstoreRequest.Type.codec() }, + { id: 2, name: 'id', codec: bytes, optional: true }, + { id: 3, name: 'protos', codec: string, repeats: true } + ]) } return _codec @@ -697,10 +697,10 @@ export namespace PeerstoreResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'peer', codec: PeerInfo.codec(), optional: true }, - 2: { name: 'protos', codec: string, repeats: true } - }) + _codec = message([ + { id: 1, name: 'peer', codec: PeerInfo.codec(), optional: true }, + { id: 2, name: 'protos', codec: string, repeats: true } + ]) } return _codec diff --git a/packages/protons/test/fixtures/dht.ts b/packages/protons/test/fixtures/dht.ts index 929d3ba..08d949f 100644 --- a/packages/protons/test/fixtures/dht.ts +++ b/packages/protons/test/fixtures/dht.ts @@ -18,13 +18,13 @@ export namespace Record { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'key', codec: bytes, optional: true }, - 2: { name: 'value', codec: bytes, optional: true }, - 3: { name: 'author', codec: bytes, optional: true }, - 4: { name: 'signature', codec: bytes, optional: true }, - 5: { name: 'timeReceived', codec: string, optional: 
true } - }) + _codec = message([ + { id: 1, name: 'key', codec: bytes, optional: true }, + { id: 2, name: 'value', codec: bytes, optional: true }, + { id: 3, name: 'author', codec: bytes, optional: true }, + { id: 4, name: 'signature', codec: bytes, optional: true }, + { id: 5, name: 'timeReceived', codec: string, optional: true } + ]) } return _codec @@ -104,11 +104,11 @@ export namespace Message { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'id', codec: bytes, optional: true }, - 2: { name: 'addrs', codec: bytes, repeats: true }, - 3: { name: 'connection', codec: Message.ConnectionType.codec(), optional: true } - }) + _codec = message([ + { id: 1, name: 'id', codec: bytes, optional: true }, + { id: 2, name: 'addrs', codec: bytes, repeats: true }, + { id: 3, name: 'connection', codec: Message.ConnectionType.codec(), optional: true } + ]) } return _codec @@ -127,14 +127,14 @@ export namespace Message { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'type', codec: Message.MessageType.codec(), optional: true }, - 10: { name: 'clusterLevelRaw', codec: int32, optional: true }, - 2: { name: 'key', codec: bytes, optional: true }, - 3: { name: 'record', codec: bytes, optional: true }, - 8: { name: 'closerPeers', codec: Message.Peer.codec(), repeats: true }, - 9: { name: 'providerPeers', codec: Message.Peer.codec(), repeats: true } - }) + _codec = message([ + { id: 1, name: 'type', codec: Message.MessageType.codec(), optional: true }, + { id: 10, name: 'clusterLevelRaw', codec: int32, optional: true }, + { id: 2, name: 'key', codec: bytes, optional: true }, + { id: 3, name: 'record', codec: bytes, optional: true }, + { id: 8, name: 'closerPeers', codec: Message.Peer.codec(), repeats: true }, + { id: 9, name: 'providerPeers', codec: Message.Peer.codec(), repeats: true } + ]) } return _codec diff --git a/packages/protons/test/fixtures/noise.ts b/packages/protons/test/fixtures/noise.ts index 85a21b4..dc6b225 100644 --- a/packages/protons/test/fixtures/noise.ts +++ b/packages/protons/test/fixtures/noise.ts @@ -17,11 +17,11 @@ export namespace pb { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'identityKey', codec: bytes }, - 2: { name: 'identitySig', codec: bytes }, - 3: { name: 'data', codec: bytes } - }) + _codec = message([ + { id: 1, name: 'identityKey', codec: bytes }, + { id: 2, name: 'identitySig', codec: bytes }, + { id: 3, name: 'data', codec: bytes } + ]) } return _codec diff --git a/packages/protons/test/fixtures/peer.ts b/packages/protons/test/fixtures/peer.ts index 33bee0a..39f55cd 100644 --- a/packages/protons/test/fixtures/peer.ts +++ b/packages/protons/test/fixtures/peer.ts @@ -18,13 +18,13 @@ export namespace Peer { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'addresses', codec: Address.codec(), repeats: true }, - 2: { name: 'protocols', codec: string, repeats: true }, - 3: { name: 'metadata', codec: Metadata.codec(), repeats: true }, - 4: { name: 'pubKey', codec: bytes, optional: true }, - 5: { name: 'peerRecordEnvelope', codec: bytes, optional: true } - }) + _codec = message([ + { id: 1, name: 'addresses', codec: Address.codec(), repeats: true }, + { id: 2, name: 'protocols', codec: string, repeats: true }, + { id: 3, name: 'metadata', codec: Metadata.codec(), repeats: true }, + { id: 4, name: 'pubKey', codec: bytes, optional: true }, + { id: 5, name: 'peerRecordEnvelope', codec: bytes, optional: 
true } + ]) } return _codec @@ -49,10 +49,10 @@ export namespace Address { export const codec = (): Codec
=> { if (_codec == null) { - _codec = message
({ - 1: { name: 'multiaddr', codec: bytes }, - 2: { name: 'isCertified', codec: bool, optional: true } - }) + _codec = message
([ + { id: 1, name: 'multiaddr', codec: bytes }, + { id: 2, name: 'isCertified', codec: bool, optional: true } + ]) } return _codec @@ -77,10 +77,10 @@ export namespace Metadata { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'key', codec: string }, - 2: { name: 'value', codec: bytes } - }) + _codec = message([ + { id: 1, name: 'key', codec: string }, + { id: 2, name: 'value', codec: bytes } + ]) } return _codec diff --git a/packages/protons/test/fixtures/test.ts b/packages/protons/test/fixtures/test.ts index e3c7132..c7e5918 100644 --- a/packages/protons/test/fixtures/test.ts +++ b/packages/protons/test/fixtures/test.ts @@ -29,9 +29,9 @@ export namespace SubMessage { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'foo', codec: string } - }) + _codec = message([ + { id: 1, name: 'foo', codec: string } + ]) } return _codec @@ -72,26 +72,26 @@ export namespace AllTheTypes { export const codec = (): Codec => { if (_codec == null) { - _codec = message({ - 1: { name: 'field1', codec: bool, optional: true }, - 2: { name: 'field2', codec: int32, optional: true }, - 3: { name: 'field3', codec: int64, optional: true }, - 4: { name: 'field4', codec: uint32, optional: true }, - 5: { name: 'field5', codec: uint64, optional: true }, - 6: { name: 'field6', codec: sint32, optional: true }, - 7: { name: 'field7', codec: sint64, optional: true }, - 8: { name: 'field8', codec: double, optional: true }, - 9: { name: 'field9', codec: float, optional: true }, - 10: { name: 'field10', codec: string, optional: true }, - 11: { name: 'field11', codec: bytes, optional: true }, - 12: { name: 'field12', codec: AnEnum.codec(), optional: true }, - 13: { name: 'field13', codec: SubMessage.codec(), optional: true }, - 14: { name: 'field14', codec: string, repeats: true }, - 15: { name: 'field15', codec: fixed32, optional: true }, - 16: { name: 'field16', codec: fixed64, optional: true }, - 17: { name: 'field17', codec: sfixed32, optional: true }, - 18: { name: 'field18', codec: sfixed64, optional: true } - }) + _codec = message([ + { id: 1, name: 'field1', codec: bool, optional: true }, + { id: 2, name: 'field2', codec: int32, optional: true }, + { id: 3, name: 'field3', codec: int64, optional: true }, + { id: 4, name: 'field4', codec: uint32, optional: true }, + { id: 5, name: 'field5', codec: uint64, optional: true }, + { id: 6, name: 'field6', codec: sint32, optional: true }, + { id: 7, name: 'field7', codec: sint64, optional: true }, + { id: 8, name: 'field8', codec: double, optional: true }, + { id: 9, name: 'field9', codec: float, optional: true }, + { id: 10, name: 'field10', codec: string, optional: true }, + { id: 11, name: 'field11', codec: bytes, optional: true }, + { id: 12, name: 'field12', codec: AnEnum.codec(), optional: true }, + { id: 13, name: 'field13', codec: SubMessage.codec(), optional: true }, + { id: 14, name: 'field14', codec: string, repeats: true }, + { id: 15, name: 'field15', codec: fixed32, optional: true }, + { id: 16, name: 'field16', codec: fixed64, optional: true }, + { id: 17, name: 'field17', codec: sfixed32, optional: true }, + { id: 18, name: 'field18', codec: sfixed64, optional: true } + ]) } return _codec From 75cde26b61a9d3f20049629f82da82258cb1e3f6 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sun, 7 Aug 2022 12:47:47 +0100 Subject: [PATCH 04/14] fix: do not re-encode to get length during decode --- packages/protons-runtime/src/codec.ts | 12 ++----- 
packages/protons-runtime/src/codecs/bool.ts | 13 ++++--- packages/protons-runtime/src/codecs/bytes.ts | 17 +++++---- packages/protons-runtime/src/codecs/double.ts | 15 ++++---- packages/protons-runtime/src/codecs/enum.ts | 13 ++++--- .../protons-runtime/src/codecs/fixed32.ts | 15 ++++---- .../protons-runtime/src/codecs/fixed64.ts | 15 ++++---- packages/protons-runtime/src/codecs/float.ts | 15 ++++---- packages/protons-runtime/src/codecs/int32.ts | 15 +++++--- packages/protons-runtime/src/codecs/int64.ts | 15 +++++--- .../protons-runtime/src/codecs/message.ts | 35 +++++++++---------- .../protons-runtime/src/codecs/sfixed32.ts | 15 ++++---- .../protons-runtime/src/codecs/sfixed64.ts | 15 ++++---- packages/protons-runtime/src/codecs/sint32.ts | 13 ++++--- packages/protons-runtime/src/codecs/sint64.ts | 15 ++++---- packages/protons-runtime/src/codecs/string.ts | 15 ++++---- packages/protons-runtime/src/codecs/uint32.ts | 15 ++++---- packages/protons-runtime/src/codecs/uint64.ts | 15 ++++---- packages/protons-runtime/src/decode.ts | 2 +- packages/protons-runtime/src/index.ts | 2 +- 20 files changed, 149 insertions(+), 138 deletions(-) diff --git a/packages/protons-runtime/src/codec.ts b/packages/protons-runtime/src/codec.ts index e92dbba..612afed 100644 --- a/packages/protons-runtime/src/codec.ts +++ b/packages/protons-runtime/src/codec.ts @@ -15,11 +15,7 @@ export interface EncodeFunction { } export interface DecodeFunction { - (buf: Uint8ArrayList, offset: number): T -} - -export interface EncodingLengthFunction { - (value: T): number + (buf: Uint8ArrayList, offset: number): { value: T, length: number } } export interface Codec { @@ -27,15 +23,13 @@ export interface Codec { type: CODEC_TYPES encode: EncodeFunction decode: DecodeFunction - encodingLength: EncodingLengthFunction } -export function createCodec (name: string, type: CODEC_TYPES, encode: EncodeFunction, decode: DecodeFunction, encodingLength: EncodingLengthFunction): Codec { +export function createCodec (name: string, type: CODEC_TYPES, encode: EncodeFunction, decode: DecodeFunction): Codec { return { name, type, encode, - decode, - encodingLength + decode } } diff --git a/packages/protons-runtime/src/codecs/bool.ts b/packages/protons-runtime/src/codecs/bool.ts index 9073ee1..2a2d7a2 100644 --- a/packages/protons-runtime/src/codecs/bool.ts +++ b/packages/protons-runtime/src/codecs/bool.ts @@ -1,9 +1,5 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' - -const encodingLength: EncodingLengthFunction = function boolEncodingLength () { - return 1 -} +import type { DecodeFunction, EncodeFunction } from '../codec.js' const encode: EncodeFunction = function boolEncode (value) { return { @@ -15,7 +11,10 @@ const encode: EncodeFunction = function boolEncode (value) { } const decode: DecodeFunction = function boolDecode (buffer, offset) { - return buffer.get(offset) > 0 + return { + value: buffer.get(offset) > 0, + length: 1 + } } -export const bool = createCodec('bool', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const bool = createCodec('bool', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/bytes.ts b/packages/protons-runtime/src/codecs/bytes.ts index 9b7f5e6..8cf73b0 100644 --- a/packages/protons-runtime/src/codecs/bytes.ts +++ b/packages/protons-runtime/src/codecs/bytes.ts @@ -1,14 +1,9 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' 
-import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { allocUnsafe } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function bytesEncodingLength (val) { - const len = val.byteLength - return unsigned.encodingLength(len) + len -} - const encode: EncodeFunction = function bytesEncode (val) { const lenLen = unsigned.encodingLength(val.byteLength) const buf = allocUnsafe(lenLen + val.byteLength) @@ -26,9 +21,13 @@ const encode: EncodeFunction = function bytesEncode (val) { const decode: DecodeFunction = function bytesDecode (buf, offset) { const byteLength = unsigned.decode(buf, offset) - offset += unsigned.encodingLength(byteLength) + const byteLengthLength = unsigned.encodingLength(byteLength) + offset += byteLengthLength - return buf.subarray(offset, offset + byteLength) + return { + value: buf.subarray(offset, offset + byteLength), + length: byteLengthLength + byteLength + } } -export const bytes = createCodec('bytes', CODEC_TYPES.LENGTH_DELIMITED, encode, decode, encodingLength) +export const bytes = createCodec('bytes', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) diff --git a/packages/protons-runtime/src/codecs/double.ts b/packages/protons-runtime/src/codecs/double.ts index 354075c..2bd3706 100644 --- a/packages/protons-runtime/src/codecs/double.ts +++ b/packages/protons-runtime/src/codecs/double.ts @@ -1,13 +1,11 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function doubleEncodingLength () { - return 8 -} +const ENCODING_LENGTH = 8 const encode: EncodeFunction = function doubleEncode (val) { - const buf = alloc(encodingLength(val)) + const buf = alloc(ENCODING_LENGTH) const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) view.setFloat64(0, val, true) @@ -20,7 +18,10 @@ const encode: EncodeFunction = function doubleEncode (val) { } const decode: DecodeFunction = function doubleDecode (buf, offset) { - return buf.getFloat64(offset, true) + return { + value: buf.getFloat64(offset, true), + length: ENCODING_LENGTH + } } -export const double = createCodec('double', CODEC_TYPES.BIT64, encode, decode, encodingLength) +export const double = createCodec('double', CODEC_TYPES.BIT64, encode, decode) diff --git a/packages/protons-runtime/src/codecs/enum.ts b/packages/protons-runtime/src/codecs/enum.ts index 79c738b..15f5a59 100644 --- a/packages/protons-runtime/src/codecs/enum.ts +++ b/packages/protons-runtime/src/codecs/enum.ts @@ -1,7 +1,7 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction, Codec } from '../codec.js' +import type { DecodeFunction, EncodeFunction, Codec } from '../codec.js' import { allocUnsafe } from 'uint8arrays/alloc' export function enumeration (v: any): Codec { @@ -17,10 +17,6 @@ export function enumeration (v: any): Codec { return v[val] } - const encodingLength: EncodingLengthFunction = function enumEncodingLength (val) { - return unsigned.encodingLength(findValue(val)) - } - const encode: EncodeFunction = function enumEncode (val) { const enumValue = findValue(val) @@ -45,9 +41,12 @@ export function enumeration (v: any): Codec { throw new 
Error('Invalid enum value') } - return v[strValue] + return { + value: v[strValue], + length: unsigned.encodingLength(value) + } } // @ts-expect-error yeah yeah - return createCodec('enum', CODEC_TYPES.VARINT, encode, decode, encodingLength) + return createCodec('enum', CODEC_TYPES.VARINT, encode, decode) } diff --git a/packages/protons-runtime/src/codecs/fixed32.ts b/packages/protons-runtime/src/codecs/fixed32.ts index f4ff908..dc1f487 100644 --- a/packages/protons-runtime/src/codecs/fixed32.ts +++ b/packages/protons-runtime/src/codecs/fixed32.ts @@ -1,13 +1,11 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function fixed32EncodingLength () { - return 4 -} +const ENCODING_LENGTH = 4 const encode: EncodeFunction = function fixed32Encode (val) { - const buf = alloc(encodingLength(val)) + const buf = alloc(ENCODING_LENGTH) const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) view.setInt32(0, val, true) @@ -20,7 +18,10 @@ const encode: EncodeFunction = function fixed32Encode (val) { } const decode: DecodeFunction = function fixed32Decode (buf, offset) { - return buf.getInt32(offset, true) + return { + value: buf.getInt32(offset, true), + length: ENCODING_LENGTH + } } -export const fixed32 = createCodec('fixed32', CODEC_TYPES.BIT32, encode, decode, encodingLength) +export const fixed32 = createCodec('fixed32', CODEC_TYPES.BIT32, encode, decode) diff --git a/packages/protons-runtime/src/codecs/fixed64.ts b/packages/protons-runtime/src/codecs/fixed64.ts index a576fc6..eca0845 100644 --- a/packages/protons-runtime/src/codecs/fixed64.ts +++ b/packages/protons-runtime/src/codecs/fixed64.ts @@ -1,13 +1,11 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function int64EncodingLength (val) { - return 8 -} +const ENCODING_LENGTH = 8 const encode: EncodeFunction = function int64Encode (val) { - const buf = alloc(encodingLength(val)) + const buf = alloc(ENCODING_LENGTH) const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) view.setBigInt64(0, val, true) @@ -20,7 +18,10 @@ const encode: EncodeFunction = function int64Encode (val) { } const decode: DecodeFunction = function int64Decode (buf, offset) { - return buf.getBigInt64(offset, true) + return { + value: buf.getBigInt64(offset, true), + length: ENCODING_LENGTH + } } -export const fixed64 = createCodec('fixed64', CODEC_TYPES.BIT64, encode, decode, encodingLength) +export const fixed64 = createCodec('fixed64', CODEC_TYPES.BIT64, encode, decode) diff --git a/packages/protons-runtime/src/codecs/float.ts b/packages/protons-runtime/src/codecs/float.ts index 1542235..d1b2519 100644 --- a/packages/protons-runtime/src/codecs/float.ts +++ b/packages/protons-runtime/src/codecs/float.ts @@ -1,13 +1,11 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function 
floatEncodingLength () { - return 4 -} +const ENCODING_LENGTH = 4 const encode: EncodeFunction = function floatEncode (val) { - const buf = alloc(encodingLength(val)) + const buf = alloc(ENCODING_LENGTH) const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) view.setFloat32(0, val, true) @@ -20,7 +18,10 @@ const encode: EncodeFunction = function floatEncode (val) { } const decode: DecodeFunction = function floatDecode (buf, offset) { - return buf.getFloat32(offset, true) + return { + value: buf.getFloat32(offset, true), + length: ENCODING_LENGTH + } } -export const float = createCodec('float', CODEC_TYPES.BIT32, encode, decode, encodingLength) +export const float = createCodec('float', CODEC_TYPES.BIT32, encode, decode) diff --git a/packages/protons-runtime/src/codecs/int32.ts b/packages/protons-runtime/src/codecs/int32.ts index 078096b..1a1142f 100644 --- a/packages/protons-runtime/src/codecs/int32.ts +++ b/packages/protons-runtime/src/codecs/int32.ts @@ -1,9 +1,9 @@ import { signed } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function int32EncodingLength (val) { +function int32EncodingLength (val: number): number { if (val < 0) { return 10 // 10 bytes per spec - https://developers.google.com/protocol-buffers/docs/encoding#signed-ints } @@ -12,7 +12,7 @@ const encodingLength: EncodingLengthFunction = function int32EncodingLen } const encode: EncodeFunction = function int32Encode (val) { - const buf = signed.encode(val, alloc(encodingLength(val))) + const buf = signed.encode(val, alloc(int32EncodingLength(val))) return { bufs: [ @@ -23,7 +23,12 @@ const encode: EncodeFunction = function int32Encode (val) { } const decode: DecodeFunction = function int32Decode (buf, offset) { - return signed.decode(buf, offset) | 0 + const value = signed.decode(buf, offset) | 0 + + return { + value, + length: int32EncodingLength(value) + } } -export const int32 = createCodec('int32', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const int32 = createCodec('int32', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/int64.ts b/packages/protons-runtime/src/codecs/int64.ts index bedcecf..534bd8d 100644 --- a/packages/protons-runtime/src/codecs/int64.ts +++ b/packages/protons-runtime/src/codecs/int64.ts @@ -1,9 +1,9 @@ import { signed } from 'uint8-varint/big' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function int64EncodingLength (val) { +function int64EncodingLength (val: bigint): number { if (val < 0n) { return 10 // 10 bytes per spec - https://developers.google.com/protocol-buffers/docs/encoding#signed-ints } @@ -12,7 +12,7 @@ const encodingLength: EncodingLengthFunction = function int64EncodingLen } const encode: EncodeFunction = function int64Encode (val) { - const buf = signed.encode(val, alloc(encodingLength(val))) + const buf = signed.encode(val, alloc(int64EncodingLength(val))) return { bufs: [ @@ -23,7 +23,12 @@ const encode: EncodeFunction = function int64Encode (val) { } const decode: DecodeFunction = 
function int64Decode (buf, offset) { - return signed.decode(buf, offset) | 0n + const value = signed.decode(buf, offset) | 0n + + return { + value, + length: int64EncodingLength(value) + } } -export const int64 = createCodec('int64', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const int64 = createCodec('int64', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/message.ts b/packages/protons-runtime/src/codecs/message.ts index 630bf4f..bb067fc 100644 --- a/packages/protons-runtime/src/codecs/message.ts +++ b/packages/protons-runtime/src/codecs/message.ts @@ -1,6 +1,6 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction, Codec } from '../codec.js' +import type { DecodeFunction, EncodeFunction, Codec } from '../codec.js' import type { FieldDef } from '../index.js' export interface Factory { @@ -14,18 +14,6 @@ export function message (fieldDefs: FieldDef[]): Codec { fieldDefLookup[def.id] = def } - const encodingLength: EncodingLengthFunction = function messageEncodingLength (val: Record) { - let length = 0 - - for (let i = 0; i < fieldDefs.length; i++) { - const fieldDef = fieldDefs[i] - - length += fieldDef.codec.encodingLength(val[fieldDef.name]) - } - - return unsigned.encodingLength(length) + length - } - const encode: EncodeFunction> = function messageEncode (val) { const bufs: Uint8Array[] = [ new Uint8Array(0) // will hold length prefix @@ -79,7 +67,8 @@ export function message (fieldDefs: FieldDef[]): Codec { const decode: DecodeFunction = function messageDecode (buffer, offset) { const length = unsigned.decode(buffer, offset) - offset += unsigned.encodingLength(length) + const lengthLength = unsigned.encodingLength(length) + offset += lengthLength const end = offset + length const fields: any = {} @@ -91,12 +80,14 @@ export function message (fieldDefs: FieldDef[]): Codec { const fieldNumber = key >> 3 const fieldDef = fieldDefLookup[fieldNumber] let fieldLength = 0 + let value if (wireType === CODEC_TYPES.VARINT) { if (fieldDef != null) { // use the codec if it is available as this could be a bigint - const value = fieldDef.codec.decode(buffer, offset) - fieldLength = fieldDef.codec.encodingLength(value) + const decoded = fieldDef.codec.decode(buffer, offset) + fieldLength = decoded.length + value = decoded.value } else { const value = unsigned.decode(buffer, offset) fieldLength = unsigned.encodingLength(value) @@ -115,7 +106,10 @@ export function message (fieldDefs: FieldDef[]): Codec { } if (fieldDef != null) { - const value = fieldDef.codec.decode(buffer, offset) + if (value == null) { + const decoded = fieldDef.codec.decode(buffer, offset) + value = decoded.value + } if (fieldDef.repeats === true) { if (fields[fieldDef.name] == null) { @@ -140,8 +134,11 @@ export function message (fieldDefs: FieldDef[]): Codec { } } - return fields + return { + value: fields, + length: lengthLength + length + } } - return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode, encodingLength) + return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) } diff --git a/packages/protons-runtime/src/codecs/sfixed32.ts b/packages/protons-runtime/src/codecs/sfixed32.ts index 2ec6eae..90a5e48 100644 --- a/packages/protons-runtime/src/codecs/sfixed32.ts +++ b/packages/protons-runtime/src/codecs/sfixed32.ts @@ -1,13 +1,11 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, 
EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function sfixed32EncodingLength () { - return 4 -} +const ENCODING_LENGTH = 4 const encode: EncodeFunction = function sfixed32Encode (val) { - const buf = alloc(encodingLength(val)) + const buf = alloc(ENCODING_LENGTH) const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) view.setInt32(0, val, true) @@ -20,7 +18,10 @@ const encode: EncodeFunction = function sfixed32Encode (val) { } const decode: DecodeFunction = function sfixed32Decode (buf, offset) { - return buf.getInt32(offset, true) + return { + value: buf.getInt32(offset, true), + length: ENCODING_LENGTH + } } -export const sfixed32 = createCodec('sfixed32', CODEC_TYPES.BIT32, encode, decode, encodingLength) +export const sfixed32 = createCodec('sfixed32', CODEC_TYPES.BIT32, encode, decode) diff --git a/packages/protons-runtime/src/codecs/sfixed64.ts b/packages/protons-runtime/src/codecs/sfixed64.ts index 978932d..7f5418a 100644 --- a/packages/protons-runtime/src/codecs/sfixed64.ts +++ b/packages/protons-runtime/src/codecs/sfixed64.ts @@ -1,13 +1,11 @@ import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' import { alloc } from 'uint8arrays/alloc' -const encodingLength: EncodingLengthFunction = function sfixed64EncodingLength () { - return 8 -} +const ENCODING_LENGTH = 8 const encode: EncodeFunction = function sfixed64Encode (val) { - const buf = alloc(encodingLength(val)) + const buf = alloc(ENCODING_LENGTH) const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) view.setBigInt64(0, val, true) @@ -20,7 +18,10 @@ const encode: EncodeFunction = function sfixed64Encode (val) { } const decode: DecodeFunction = function sfixed64Decode (buf, offset) { - return buf.getBigInt64(offset, true) + return { + value: buf.getBigInt64(offset, true), + length: ENCODING_LENGTH + } } -export const sfixed64 = createCodec('sfixed64', CODEC_TYPES.BIT64, encode, decode, encodingLength) +export const sfixed64 = createCodec('sfixed64', CODEC_TYPES.BIT64, encode, decode) diff --git a/packages/protons-runtime/src/codecs/sint32.ts b/packages/protons-runtime/src/codecs/sint32.ts index c36cd42..fed6906 100644 --- a/packages/protons-runtime/src/codecs/sint32.ts +++ b/packages/protons-runtime/src/codecs/sint32.ts @@ -1,8 +1,8 @@ import { zigzag } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' +import type { DecodeFunction, EncodeFunction } from '../codec.js' -const encodingLength: EncodingLengthFunction = function sint32EncodingLength (val) { +function sint32EncodingLength (val: number) { return zigzag.encodingLength(val) } @@ -18,7 +18,12 @@ const encode: EncodeFunction = function svarintEncode (val) { } const decode: DecodeFunction = function svarintDecode (buf, offset) { - return zigzag.decode(buf, offset) + const value = zigzag.decode(buf, offset) + + return { + value, + length: sint32EncodingLength(value) + } } -export const sint32 = createCodec('sint32', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const sint32 = createCodec('sint32', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/sint64.ts 
b/packages/protons-runtime/src/codecs/sint64.ts index 553d915..73e143e 100644 --- a/packages/protons-runtime/src/codecs/sint64.ts +++ b/packages/protons-runtime/src/codecs/sint64.ts @@ -1,10 +1,6 @@ import { zigzag } from 'uint8-varint/big' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' - -const encodingLength: EncodingLengthFunction = function int64EncodingLength (val) { - return zigzag.encodingLength(val) -} +import type { DecodeFunction, EncodeFunction } from '../codec.js' const encode: EncodeFunction = function int64Encode (val) { const buf = zigzag.encode(val) @@ -18,7 +14,12 @@ const encode: EncodeFunction = function int64Encode (val) { } const decode: DecodeFunction = function int64Decode (buf, offset) { - return zigzag.decode(buf, offset) + const value = zigzag.decode(buf, offset) + + return { + value, + length: zigzag.encodingLength(value) + } } -export const sint64 = createCodec('sint64', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const sint64 = createCodec('sint64', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/string.ts b/packages/protons-runtime/src/codecs/string.ts index b206b59..f3ef2bb 100644 --- a/packages/protons-runtime/src/codecs/string.ts +++ b/packages/protons-runtime/src/codecs/string.ts @@ -2,12 +2,7 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import { allocUnsafe } from 'uint8arrays/alloc' import * as utf8 from '../utils/utf8.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' - -const encodingLength: EncodingLengthFunction = function stringEncodingLength (val) { - const len = utf8.length(val) - return unsigned.encodingLength(len) + len -} +import type { DecodeFunction, EncodeFunction } from '../codec.js' const encode: EncodeFunction = function stringEncode (val) { const strLen = utf8.length(val) @@ -29,8 +24,12 @@ const decode: DecodeFunction = function stringDecode (buf, offset) { const strLen = unsigned.decode(buf, offset) offset += unsigned.encodingLength(strLen) const b = buf.subarray(offset, offset + strLen) + const value = utf8.read(b, 0, b.byteLength) - return utf8.read(b, 0, b.byteLength) + return { + value, + length: unsigned.encodingLength(strLen) + strLen + } } -export const string = createCodec('string', CODEC_TYPES.LENGTH_DELIMITED, encode, decode, encodingLength) +export const string = createCodec('string', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) diff --git a/packages/protons-runtime/src/codecs/uint32.ts b/packages/protons-runtime/src/codecs/uint32.ts index 5b45217..566b755 100644 --- a/packages/protons-runtime/src/codecs/uint32.ts +++ b/packages/protons-runtime/src/codecs/uint32.ts @@ -1,10 +1,6 @@ import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' - -const encodingLength: EncodingLengthFunction = function uint32EncodingLength (val) { - return unsigned.encodingLength(val) -} +import type { DecodeFunction, EncodeFunction } from '../codec.js' const encode: EncodeFunction = function uint32Encode (val) { const buf = unsigned.encode(val) @@ -18,7 +14,12 @@ const encode: EncodeFunction = function uint32Encode (val) { } const decode: DecodeFunction = function uint32Decode (buf, offset) { - return unsigned.decode(buf, offset) + const value = unsigned.decode(buf, offset) + 
+ return { + value, + length: unsigned.encodingLength(value) + } } -export const uint32 = createCodec('uint32', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const uint32 = createCodec('uint32', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/uint64.ts b/packages/protons-runtime/src/codecs/uint64.ts index b61d2b6..2e83664 100644 --- a/packages/protons-runtime/src/codecs/uint64.ts +++ b/packages/protons-runtime/src/codecs/uint64.ts @@ -1,10 +1,6 @@ import { unsigned } from 'uint8-varint/big' import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, EncodingLengthFunction } from '../codec.js' - -const encodingLength: EncodingLengthFunction = function uint64EncodingLength (val) { - return unsigned.encodingLength(val) -} +import type { DecodeFunction, EncodeFunction } from '../codec.js' const encode: EncodeFunction = function uint64Encode (val) { const buf = unsigned.encode(val) @@ -18,7 +14,12 @@ const encode: EncodeFunction = function uint64Encode (val) { } const decode: DecodeFunction = function uint64Decode (buf, offset) { - return unsigned.decode(buf, offset) + const value = unsigned.decode(buf, offset) + + return { + value, + length: unsigned.encodingLength(value) + } } -export const uint64 = createCodec('uint64', CODEC_TYPES.VARINT, encode, decode, encodingLength) +export const uint64 = createCodec('uint64', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/decode.ts b/packages/protons-runtime/src/decode.ts index 6384bc7..81839fa 100644 --- a/packages/protons-runtime/src/decode.ts +++ b/packages/protons-runtime/src/decode.ts @@ -8,5 +8,5 @@ export function decodeMessage (buf: Uint8Array | Uint8ArrayList, codec: Code const prefix = allocUnsafe(unsigned.encodingLength(buf.byteLength)) unsigned.encode(buf.byteLength, prefix) - return codec.decode(new Uint8ArrayList(prefix, buf), 0) + return codec.decode(new Uint8ArrayList(prefix, buf), 0).value } diff --git a/packages/protons-runtime/src/index.ts b/packages/protons-runtime/src/index.ts index b3b83d5..0974c61 100644 --- a/packages/protons-runtime/src/index.ts +++ b/packages/protons-runtime/src/index.ts @@ -1,7 +1,7 @@ import type { Codec } from './codec.js' export interface FieldDef { - id: number, + id: number name: string codec: Codec optional?: true From 2f49ef4ad7858db1eae25c7f2d7af750bdbf38d5 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sun, 7 Aug 2022 16:01:25 +0100 Subject: [PATCH 05/14] chore: update test --- packages/protons-runtime/src/encode.ts | 15 ++++----------- packages/protons/package.json | 1 + packages/protons/test/index.spec.ts | 6 ++---- 3 files changed, 7 insertions(+), 15 deletions(-) diff --git a/packages/protons-runtime/src/encode.ts b/packages/protons-runtime/src/encode.ts index fb65c44..74c78fe 100644 --- a/packages/protons-runtime/src/encode.ts +++ b/packages/protons-runtime/src/encode.ts @@ -1,20 +1,13 @@ import { Uint8ArrayList } from 'uint8arraylist' import type { Codec } from './codec.js' +import { unsigned } from 'uint8-varint' export function encodeMessage (message: T, codec: Codec): Uint8ArrayList { const encoded = codec.encode(message) const list = Uint8ArrayList.fromUint8Arrays(encoded.bufs, encoded.length) // unwrap root message - it is prefixed by a varint so skip those bytes - let skip = 0 - for (let i = 0; i < list.byteLength; i++) { - skip = i + 1 - - // when the MSB is not 1, there are no more bytes in this varint - if ((list.get(i) & 0x80) !== 0x80) { - break - } - } - - 
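The length bookkeeping in the varint codecs follows the unsigned varint rules: values 0..127 fit in one byte and every further 7 bits of payload adds another byte. A short sketch using the same uint8-varint helpers that the patch itself imports:

// Sketch: encodingLength reports how many bytes unsigned.encode produces,
// which is what the varint decoders above return as the bytes consumed.
import { unsigned } from 'uint8-varint'

console.log(unsigned.encodingLength(127)) // 1 - fits in a single byte
console.log(unsigned.encodingLength(128)) // 2 - needs a continuation byte
console.log(unsigned.encodingLength(300)) // 2 - encoded as 0xAC 0x02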
return list.sublist(skip) + const length = unsigned.decode(list) + const lengthLength = unsigned.encodingLength(length) + return list.sublist(lengthLength) } diff --git a/packages/protons/package.json b/packages/protons/package.json index e1d3b4b..4137c5a 100644 --- a/packages/protons/package.json +++ b/packages/protons/package.json @@ -158,6 +158,7 @@ }, "devDependencies": { "aegir": "^37.0.5", + "long": "^5.2.0", "pbjs": "^0.0.14", "protons-runtime": "^2.0.0", "uint8arraylist": "^2.3.1" diff --git a/packages/protons/test/index.spec.ts b/packages/protons/test/index.spec.ts index 1ca67d8..d333db5 100644 --- a/packages/protons/test/index.spec.ts +++ b/packages/protons/test/index.spec.ts @@ -9,8 +9,7 @@ import fs from 'fs' import protobufjs, { Type as PBType } from 'protobufjs' import { Peer } from './fixtures/peer.js' import { CircuitRelay } from './fixtures/circuit.js' - -const Long = protobufjs.util.Long +import long from 'long' function longifyBigInts (obj: any) { const output = { @@ -19,8 +18,7 @@ function longifyBigInts (obj: any) { for (const key of Object.keys(output)) { if (typeof output[key] === 'bigint') { - // @ts-expect-error exported types are incomplete - output[key] = Long.fromString(`${output[key].toString()}`) + output[key] = long.fromString(`${output[key].toString()}`) } } From 6ac7d5fd246d9f73197e49fe1827a3ae17ecc809 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 8 Aug 2022 09:47:13 +0100 Subject: [PATCH 06/14] fix: do not lp encode if not necessary --- packages/protons-runtime/src/codec.ts | 6 ++++- .../protons-runtime/src/codecs/message.ts | 25 +++++++++++++------ packages/protons-runtime/src/encode.ts | 11 +++----- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/packages/protons-runtime/src/codec.ts b/packages/protons-runtime/src/codec.ts index 612afed..03e4d6c 100644 --- a/packages/protons-runtime/src/codec.ts +++ b/packages/protons-runtime/src/codec.ts @@ -10,8 +10,12 @@ export enum CODEC_TYPES { BIT32 } +export interface EncodeOptions { + lengthDelimited?: boolean +} + export interface EncodeFunction { - (value: T): { bufs: Uint8Array[], length: number } + (value: T, opts?: EncodeOptions): { bufs: Uint8Array[], length: number } } export interface DecodeFunction { diff --git a/packages/protons-runtime/src/codecs/message.ts b/packages/protons-runtime/src/codecs/message.ts index bb067fc..3d984c0 100644 --- a/packages/protons-runtime/src/codecs/message.ts +++ b/packages/protons-runtime/src/codecs/message.ts @@ -14,10 +14,14 @@ export function message (fieldDefs: FieldDef[]): Codec { fieldDefLookup[def.id] = def } - const encode: EncodeFunction> = function messageEncode (val) { - const bufs: Uint8Array[] = [ - new Uint8Array(0) // will hold length prefix - ] + const encode: EncodeFunction> = function messageEncode (val, opts = {}) { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited === true) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + let length = 0 function encodeValue (value: any, fieldNumber: number, fieldDef: FieldDef) { @@ -54,10 +58,17 @@ export function message (fieldDefs: FieldDef[]): Codec { } } - const prefix = unsigned.encode(length) + if (opts.lengthDelimited === true) { + const prefix = unsigned.encode(length) - bufs[0] = prefix - length += prefix.byteLength + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } return { bufs, diff --git a/packages/protons-runtime/src/encode.ts b/packages/protons-runtime/src/encode.ts index 74c78fe..78b541c 100644 --- 
a/packages/protons-runtime/src/encode.ts +++ b/packages/protons-runtime/src/encode.ts @@ -1,13 +1,10 @@ import { Uint8ArrayList } from 'uint8arraylist' import type { Codec } from './codec.js' -import { unsigned } from 'uint8-varint' export function encodeMessage (message: T, codec: Codec): Uint8ArrayList { - const encoded = codec.encode(message) - const list = Uint8ArrayList.fromUint8Arrays(encoded.bufs, encoded.length) + const encoded = codec.encode(message, { + lengthDelimited: false + }) - // unwrap root message - it is prefixed by a varint so skip those bytes - const length = unsigned.decode(list) - const lengthLength = unsigned.encodingLength(length) - return list.sublist(lengthLength) + return Uint8ArrayList.fromUint8Arrays(encoded.bufs, encoded.length) } From f6f1eab70446a8dcca08617eecb8da55147af9da Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 8 Aug 2022 09:48:57 +0100 Subject: [PATCH 07/14] chore: split return types --- packages/protons-runtime/src/codec.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/packages/protons-runtime/src/codec.ts b/packages/protons-runtime/src/codec.ts index 03e4d6c..23d1ccc 100644 --- a/packages/protons-runtime/src/codec.ts +++ b/packages/protons-runtime/src/codec.ts @@ -14,12 +14,22 @@ export interface EncodeOptions { lengthDelimited?: boolean } +export interface EncodeResult { + bufs: Uint8Array[] + length: number +} + export interface EncodeFunction { - (value: T, opts?: EncodeOptions): { bufs: Uint8Array[], length: number } + (value: T, opts?: EncodeOptions): EncodeResult +} + +export interface DecodeResult { + value: T + length: number } export interface DecodeFunction { - (buf: Uint8ArrayList, offset: number): { value: T, length: number } + (buf: Uint8ArrayList, offset: number): DecodeResult } export interface Codec { From 18ee2dd644a44f4befec5a2aafb76158109ca4c8 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 8 Aug 2022 10:48:40 +0100 Subject: [PATCH 08/14] fix: do not loop in message encoder --- .../protons-benchmark/src/protons/bench.ts | 247 +++- packages/protons-runtime/package.json | 2 +- .../protons-runtime/src/codecs/message.ts | 83 +- packages/protons-runtime/src/index.ts | 3 +- packages/protons/package.json | 2 +- packages/protons/src/index.ts | 125 +- packages/protons/test/fixtures/basic.ts | 53 +- packages/protons/test/fixtures/circuit.ts | 129 +- packages/protons/test/fixtures/daemon.ts | 1219 +++++++++++++++-- packages/protons/test/fixtures/dht.ts | 243 +++- packages/protons/test/fixtures/noise.ts | 63 +- packages/protons/test/fixtures/peer.ts | 189 ++- packages/protons/test/fixtures/test.ts | 257 +++- packages/protons/tsconfig.json | 3 + 14 files changed, 2383 insertions(+), 235 deletions(-) diff --git a/packages/protons-benchmark/src/protons/bench.ts b/packages/protons-benchmark/src/protons/bench.ts index b0055e4..2020a93 100644 --- a/packages/protons-benchmark/src/protons/bench.ts +++ b/packages/protons-benchmark/src/protons/bench.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { encodeMessage, decodeMessage, message, uint32, enumeration, string, bytes } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export interface Foo { baz: number @@ -14,9 +15,43 @@ export namespace Foo { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'baz', codec: uint32 } 
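What the lengthDelimited option controls can be shown with a small runnable sketch (the payload bytes are an arbitrary example): a length-delimited encoding is just the payload preceded by its byte count as an unsigned varint, and the root message can skip that prefix because its total length is already known.

// Sketch: same payload with and without the varint length prefix.
import { unsigned } from 'uint8-varint'
import { Uint8ArrayList } from 'uint8arraylist'

const payload = Uint8Array.from([0x0a, 0x03, 0x66, 0x6f, 0x6f]) // some already-encoded fields

const root = new Uint8ArrayList(payload) // lengthDelimited: false - bytes as-is
const embedded = new Uint8ArrayList(unsigned.encode(payload.byteLength), payload) // prefixed

console.log(root.byteLength) // 5
console.log(embedded.byteLength) // 6 - one extra byte for the varint prefix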
- ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $baz = obj.baz + if ($baz != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = uint32.encode($baz) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'baz', codec: uint32 } + }) } return _codec @@ -40,9 +75,43 @@ export namespace Bar { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'tmp', codec: Foo.codec() } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $tmp = obj.tmp + if ($tmp != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = Foo.codec().encode($tmp) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'tmp', codec: Foo.codec() } + }) } return _codec @@ -69,7 +138,7 @@ enum __FOOValues { export namespace FOO { export const codec = () => { - return enumeration(__FOOValues) + return enumeration(__FOOValues) } } export interface Yo { @@ -81,9 +150,45 @@ export namespace Yo { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'lol', codec: FOO.codec(), repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $lol = obj.lol + if ($lol != null) { + for (const value of $lol) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = FOO.codec().encode(value) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'lol', codec: FOO.codec(), repeats: true } + }) } return _codec @@ -108,10 +213,52 @@ export namespace Lol { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'lol', codec: string }, - { id: 2, name: 'b', codec: Bar.codec() } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $lol = obj.lol + if ($lol != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode($lol) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $b = obj.b + if ($b != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = 
Bar.codec().encode($b) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'lol', codec: string }, + '2': { name: 'b', codec: Bar.codec() } + }) } return _codec @@ -138,12 +285,70 @@ export namespace Test { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 6, name: 'meh', codec: Lol.codec() }, - { id: 3, name: 'hello', codec: uint32 }, - { id: 1, name: 'foo', codec: string }, - { id: 7, name: 'payload', codec: bytes } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $meh = obj.meh + if ($meh != null) { + const prefixField6 = Uint8Array.from([50]) + const encodedField6 = Lol.codec().encode($meh) + bufs.push(prefixField6, ...encodedField6.bufs) + length += prefixField6.byteLength + encodedField6.length + } + + const $hello = obj.hello + if ($hello != null) { + const prefixField3 = Uint8Array.from([24]) + const encodedField3 = uint32.encode($hello) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $foo = obj.foo + if ($foo != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode($foo) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $payload = obj.payload + if ($payload != null) { + const prefixField7 = Uint8Array.from([58]) + const encodedField7 = bytes.encode($payload) + bufs.push(prefixField7, ...encodedField7.bufs) + length += prefixField7.byteLength + encodedField7.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '6': { name: 'meh', codec: Lol.codec() }, + '3': { name: 'hello', codec: uint32 }, + '1': { name: 'foo', codec: string }, + '7': { name: 'payload', codec: bytes } + }) } return _codec diff --git a/packages/protons-runtime/package.json b/packages/protons-runtime/package.json index dae8399..4606b3b 100644 --- a/packages/protons-runtime/package.json +++ b/packages/protons-runtime/package.json @@ -151,7 +151,7 @@ "byte-access": "^1.0.1", "longbits": "^1.1.0", "uint8-varint": "^1.0.2", - "uint8arraylist": "^2.3.1", + "uint8arraylist": "^2.3.2", "uint8arrays": "^3.1.0" }, "devDependencies": { diff --git a/packages/protons-runtime/src/codecs/message.ts b/packages/protons-runtime/src/codecs/message.ts index 3d984c0..d3a20d0 100644 --- a/packages/protons-runtime/src/codecs/message.ts +++ b/packages/protons-runtime/src/codecs/message.ts @@ -1,12 +1,90 @@ import { unsigned } from 'uint8-varint' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction, Codec } from '../codec.js' +import { createCodec, CODEC_TYPES, EncodeResult, EncodeOptions } from '../codec.js' +import type { DecodeFunction, Codec } from '../codec.js' import type { FieldDef } from '../index.js' export interface Factory { new (obj: A): T } +export function message (encode: (obj: T, opts?: EncodeOptions) => EncodeResult, fieldDefs: Record): Codec { 
+ const decode: DecodeFunction = function messageDecode (buffer, offset) { + const length = unsigned.decode(buffer, offset) + const lengthLength = unsigned.encodingLength(length) + offset += lengthLength + const end = offset + length + const fields: any = {} + + while (offset < end) { + const key = unsigned.decode(buffer, offset) + offset += unsigned.encodingLength(key) + + const wireType = key & 0x7 + const fieldNumber = key >> 3 + const fieldDef = fieldDefs[fieldNumber.toString()] + let fieldLength = 0 + let value + + if (wireType === CODEC_TYPES.VARINT) { + if (fieldDef != null) { + // use the codec if it is available as this could be a bigint + const decoded = fieldDef.codec.decode(buffer, offset) + fieldLength = decoded.length + value = decoded.value + } else { + const value = unsigned.decode(buffer, offset) + fieldLength = unsigned.encodingLength(value) + } + } else if (wireType === CODEC_TYPES.BIT64) { + fieldLength = 8 + } else if (wireType === CODEC_TYPES.LENGTH_DELIMITED) { + const valueLength = unsigned.decode(buffer, offset) + fieldLength = valueLength + unsigned.encodingLength(valueLength) + } else if (wireType === CODEC_TYPES.BIT32) { + fieldLength = 4 + } else if (wireType === CODEC_TYPES.START_GROUP) { + throw new Error('Unsupported wire type START_GROUP') + } else if (wireType === CODEC_TYPES.END_GROUP) { + throw new Error('Unsupported wire type END_GROUP') + } + + if (fieldDef != null) { + if (value == null) { + const decoded = fieldDef.codec.decode(buffer, offset) + value = decoded.value + } + + if (fieldDef.repeats === true) { + if (fields[fieldDef.name] == null) { + fields[fieldDef.name] = [] + } + + fields[fieldDef.name].push(value) + } else { + fields[fieldDef.name] = value + } + } + + offset += fieldLength + } + + // make sure repeated fields have an array if not set + for (const fieldDef of Object.values(fieldDefs)) { + if (fieldDef.repeats === true && fields[fieldDef.name] == null) { + fields[fieldDef.name] = [] + } + } + + return { + value: fields, + length: lengthLength + length + } + } + + return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) +} + +/* export function message (fieldDefs: FieldDef[]): Codec { // create a id => FieldDef mapping for quick access const fieldDefLookup: Record = {} @@ -153,3 +231,4 @@ export function message (fieldDefs: FieldDef[]): Codec { return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) } +*/ diff --git a/packages/protons-runtime/src/index.ts b/packages/protons-runtime/src/index.ts index 0974c61..6d24911 100644 --- a/packages/protons-runtime/src/index.ts +++ b/packages/protons-runtime/src/index.ts @@ -1,7 +1,6 @@ import type { Codec } from './codec.js' export interface FieldDef { - id: number name: string codec: Codec optional?: true @@ -34,4 +33,4 @@ export { sint64 } from './codecs/sint64.js' export { string } from './codecs/string.js' export { uint32 } from './codecs/uint32.js' export { uint64 } from './codecs/uint64.js' -export type { Codec } from './codec.js' +export type { Codec, EncodeOptions } from './codec.js' diff --git a/packages/protons/package.json b/packages/protons/package.json index 4137c5a..ef5c2b7 100644 --- a/packages/protons/package.json +++ b/packages/protons/package.json @@ -161,6 +161,6 @@ "long": "^5.2.0", "pbjs": "^0.0.14", "protons-runtime": "^2.0.0", - "uint8arraylist": "^2.3.1" + "uint8arraylist": "^2.3.2" } } diff --git a/packages/protons/src/index.ts b/packages/protons/src/index.ts index 52749ef..9046380 100644 --- a/packages/protons/src/index.ts +++ 
b/packages/protons/src/index.ts @@ -2,6 +2,16 @@ import { main as pbjs } from 'protobufjs/cli/pbjs.js' import path from 'path' import { promisify } from 'util' import fs from 'fs/promises' +import { unsigned } from 'uint8-varint' + +export enum CODEC_TYPES { + VARINT = 0, + BIT64, + LENGTH_DELIMITED, + START_GROUP, + END_GROUP, + BIT32 +} function pathWithExtension (input: string, extension: string, outputDir?: string) { const output = outputDir ?? path.dirname(input) @@ -72,20 +82,40 @@ function findDef (typeName: string, classDef: MessageDef, moduleDef: ModuleDef): const encoders: Record = { bool: 'bool', - double: 'double', bytes: 'bytes', + double: 'double', fixed32: 'fixed32', fixed64: 'fixed64', float: 'float', int32: 'int32', int64: 'int64', + sfixed32: 'sfixed32', + sfixed64: 'sfixed64', sint32: 'sint32', sint64: 'sint64', string: 'string', uint32: 'uint32', - uint64: 'uint64', - sfixed32: 'sfixed32', - sfixed64: 'sfixed64' + uint64: 'uint64' +} + +const codecTypes: Record = { + bool: CODEC_TYPES.VARINT, + bytes: CODEC_TYPES.LENGTH_DELIMITED, + double: CODEC_TYPES.BIT64, + enum: CODEC_TYPES.VARINT, + fixed32: CODEC_TYPES.BIT32, + fixed64: CODEC_TYPES.BIT64, + float: CODEC_TYPES.BIT32, + int32: CODEC_TYPES.VARINT, + int64: CODEC_TYPES.VARINT, + message: CODEC_TYPES.LENGTH_DELIMITED, + sfixed32: CODEC_TYPES.BIT32, + sfixed64: CODEC_TYPES.BIT64, + sint32: CODEC_TYPES.VARINT, + sint64: CODEC_TYPES.VARINT, + string: CODEC_TYPES.LENGTH_DELIMITED, + uint32: CODEC_TYPES.VARINT, + uint64: CODEC_TYPES.VARINT } interface ClassDef { @@ -148,7 +178,7 @@ enum __${messageDef.name}Values { export namespace ${messageDef.name} { export const codec = () => { - return enumeration(__${messageDef.name}Values) + return enumeration<${messageDef.name}>(__${messageDef.name}Values) } }`.trim() } @@ -194,7 +224,77 @@ export interface ${messageDef.name} { export const codec = (): Codec<${messageDef.name}> => { if (_codec == null) { - _codec = message<${messageDef.name}>([ + _codec = message<${messageDef.name}>((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + ${Object.entries(fields) + .map(([name, fieldDef]) => { + let codec = encoders[fieldDef.type] + let type: string = fieldDef.type + + if (codec == null) { + const def = findDef(fieldDef.type, messageDef, moduleDef) + + if (isEnumDef(def)) { + moduleDef.imports.add('enumeration') + type = 'enum' + } else { + moduleDef.imports.add('message') + type = 'message' + } + + const typeName = findTypeName(fieldDef.type, messageDef, moduleDef) + codec = `${typeName}.codec()` + } else { + moduleDef.imports.add(codec) + } + + if (fieldDef.rule === 'repeated') { + return ` + const $${name} = obj.${name} + if ($${name} != null) { + for (const value of $${name}) { + const prefixField${fieldDef.id} = Uint8Array.from([${unsigned.encode((fieldDef.id << 3) | codecTypes[type]).join(', ')}]) + const encodedField${fieldDef.id} = ${codec}.encode(value) + bufs.push(prefixField${fieldDef.id}, ...encodedField${fieldDef.id}.bufs) + length += prefixField${fieldDef.id}.byteLength + encodedField${fieldDef.id}.length + } + }` + } + + return ` + const $${name} = obj.${name} + if ($${name} != null) { + const prefixField${fieldDef.id} = Uint8Array.from([${unsigned.encode((fieldDef.id << 3) | codecTypes[type]).join(', ')}]) + const encodedField${fieldDef.id} = ${codec}.encode($${name}) + bufs.push(prefixField${fieldDef.id}, ...encodedField${fieldDef.id}.bufs) + 
length += prefixField${fieldDef.id}.byteLength + encodedField${fieldDef.id}.length + }` + }).join('\n')} + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { ${Object.entries(fields) .map(([name, fieldDef]) => { let codec = encoders[fieldDef.type] @@ -214,9 +314,9 @@ export interface ${messageDef.name} { moduleDef.imports.add(codec) } - return `{ id: ${fieldDef.id}, name: '${name}', codec: ${codec}${fieldDef.options?.proto3_optional === true ? ', optional: true' : ''}${fieldDef.rule === 'repeated' ? ', repeats: true' : ''} }` + return `'${fieldDef.id}': { name: '${name}', codec: ${codec}${fieldDef.options?.proto3_optional === true ? ', optional: true' : ''}${fieldDef.rule === 'repeated' ? ', repeats: true' : ''} }` }).join(',\n ')} - ]) + }) } return _codec @@ -317,12 +417,13 @@ export async function generate (source: string, flags: Flags) { lines.push(`import { ${Array.from(moduleDef.imports).join(', ')} } from 'protons-runtime'`) } - if (moduleDef.importedTypes.size > 0) { - lines.push(`import type { ${Array.from(moduleDef.importedTypes).join(', ')} } from 'protons-runtime'`) - } - if (moduleDef.imports.has('encodeMessage')) { lines.push("import type { Uint8ArrayList } from 'uint8arraylist'") + lines.push("import { unsigned } from 'uint8-varint'") + } + + if (moduleDef.importedTypes.size > 0) { + lines.push(`import type { ${Array.from(moduleDef.importedTypes).join(', ')} } from 'protons-runtime'`) } lines = [ diff --git a/packages/protons/test/fixtures/basic.ts b/packages/protons/test/fixtures/basic.ts index bc26c7b..60d4d56 100644 --- a/packages/protons/test/fixtures/basic.ts +++ b/packages/protons/test/fixtures/basic.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { encodeMessage, decodeMessage, message, string, int32 } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export interface Basic { foo?: string @@ -15,10 +16,52 @@ export namespace Basic { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'foo', codec: string, optional: true }, - { id: 2, name: 'num', codec: int32 } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $foo = obj.foo + if ($foo != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode($foo) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $num = obj.num + if ($num != null) { + const prefixField2 = Uint8Array.from([16]) + const encodedField2 = int32.encode($num) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'foo', codec: string, optional: true }, + '2': { name: 'num', codec: int32 } + }) } return _codec diff --git a/packages/protons/test/fixtures/circuit.ts b/packages/protons/test/fixtures/circuit.ts index 
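The single-byte prefixes baked into the generated code (Uint8Array.from([8]), [10], [18], [50], ...) come from the protobuf tag formula used in the generator above: (field number << 3) | wire type, varint encoded. A worked example:

// Worked example of the tag bytes the generator precomputes.
import { unsigned } from 'uint8-varint'

const VARINT = 0
const LENGTH_DELIMITED = 2

console.log(unsigned.encode((1 << 3) | VARINT)) // single byte 8, e.g. a uint32 in field 1
console.log(unsigned.encode((1 << 3) | LENGTH_DELIMITED)) // single byte 10, e.g. a string, bytes or message in field 1
console.log(unsigned.encode((2 << 3) | LENGTH_DELIMITED)) // single byte 18, field 2
console.log(unsigned.encode((6 << 3) | LENGTH_DELIMITED)) // single byte 50, field 6
console.log(unsigned.encode((16 << 3) | VARINT)) // two bytes - field numbers above 15 need a multi-byte tag, hence unsigned.encode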
2582bac..37a52da 100644 --- a/packages/protons/test/fixtures/circuit.ts +++ b/packages/protons/test/fixtures/circuit.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { enumeration, encodeMessage, decodeMessage, message, bytes } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export interface CircuitRelay { type?: CircuitRelay.Type @@ -53,7 +54,7 @@ export namespace CircuitRelay { export namespace Status { export const codec = () => { - return enumeration(__StatusValues) + return enumeration(__StatusValues) } } @@ -73,7 +74,7 @@ export namespace CircuitRelay { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -87,10 +88,54 @@ export namespace CircuitRelay { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'id', codec: bytes }, - { id: 2, name: 'addrs', codec: bytes, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $id = obj.id + if ($id != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($id) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $addrs = obj.addrs + if ($addrs != null) { + for (const value of $addrs) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'id', codec: bytes }, + '2': { name: 'addrs', codec: bytes, repeats: true } + }) } return _codec @@ -109,12 +154,70 @@ export namespace CircuitRelay { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: CircuitRelay.Type.codec(), optional: true }, - { id: 2, name: 'srcPeer', codec: CircuitRelay.Peer.codec(), optional: true }, - { id: 3, name: 'dstPeer', codec: CircuitRelay.Peer.codec(), optional: true }, - { id: 4, name: 'code', codec: CircuitRelay.Status.codec(), optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = CircuitRelay.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $srcPeer = obj.srcPeer + if ($srcPeer != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = CircuitRelay.Peer.codec().encode($srcPeer) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $dstPeer = obj.dstPeer + if ($dstPeer != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = CircuitRelay.Peer.codec().encode($dstPeer) + 
bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $code = obj.code + if ($code != null) { + const prefixField4 = Uint8Array.from([32]) + const encodedField4 = CircuitRelay.Status.codec().encode($code) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: CircuitRelay.Type.codec(), optional: true }, + '2': { name: 'srcPeer', codec: CircuitRelay.Peer.codec(), optional: true }, + '3': { name: 'dstPeer', codec: CircuitRelay.Peer.codec(), optional: true }, + '4': { name: 'code', codec: CircuitRelay.Status.codec(), optional: true } + }) } return _codec diff --git a/packages/protons/test/fixtures/daemon.ts b/packages/protons/test/fixtures/daemon.ts index 8c4736c..69aac11 100644 --- a/packages/protons/test/fixtures/daemon.ts +++ b/packages/protons/test/fixtures/daemon.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { enumeration, encodeMessage, decodeMessage, message, bytes, int64, string, int32 } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export interface Request { type: Request.Type @@ -46,7 +47,7 @@ export namespace Request { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -54,17 +55,115 @@ export namespace Request { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: Request.Type.codec() }, - { id: 2, name: 'connect', codec: ConnectRequest.codec(), optional: true }, - { id: 3, name: 'streamOpen', codec: StreamOpenRequest.codec(), optional: true }, - { id: 4, name: 'streamHandler', codec: StreamHandlerRequest.codec(), optional: true }, - { id: 5, name: 'dht', codec: DHTRequest.codec(), optional: true }, - { id: 6, name: 'connManager', codec: ConnManagerRequest.codec(), optional: true }, - { id: 7, name: 'disconnect', codec: DisconnectRequest.codec(), optional: true }, - { id: 8, name: 'pubsub', codec: PSRequest.codec(), optional: true }, - { id: 9, name: 'peerStore', codec: PeerstoreRequest.codec(), optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = Request.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $connect = obj.connect + if ($connect != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = ConnectRequest.codec().encode($connect) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $streamOpen = obj.streamOpen + if ($streamOpen != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = StreamOpenRequest.codec().encode($streamOpen) + bufs.push(prefixField3, ...encodedField3.bufs) + length += 
prefixField3.byteLength + encodedField3.length + } + + const $streamHandler = obj.streamHandler + if ($streamHandler != null) { + const prefixField4 = Uint8Array.from([34]) + const encodedField4 = StreamHandlerRequest.codec().encode($streamHandler) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + const $dht = obj.dht + if ($dht != null) { + const prefixField5 = Uint8Array.from([42]) + const encodedField5 = DHTRequest.codec().encode($dht) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + const $connManager = obj.connManager + if ($connManager != null) { + const prefixField6 = Uint8Array.from([50]) + const encodedField6 = ConnManagerRequest.codec().encode($connManager) + bufs.push(prefixField6, ...encodedField6.bufs) + length += prefixField6.byteLength + encodedField6.length + } + + const $disconnect = obj.disconnect + if ($disconnect != null) { + const prefixField7 = Uint8Array.from([58]) + const encodedField7 = DisconnectRequest.codec().encode($disconnect) + bufs.push(prefixField7, ...encodedField7.bufs) + length += prefixField7.byteLength + encodedField7.length + } + + const $pubsub = obj.pubsub + if ($pubsub != null) { + const prefixField8 = Uint8Array.from([66]) + const encodedField8 = PSRequest.codec().encode($pubsub) + bufs.push(prefixField8, ...encodedField8.bufs) + length += prefixField8.byteLength + encodedField8.length + } + + const $peerStore = obj.peerStore + if ($peerStore != null) { + const prefixField9 = Uint8Array.from([74]) + const encodedField9 = PeerstoreRequest.codec().encode($peerStore) + bufs.push(prefixField9, ...encodedField9.bufs) + length += prefixField9.byteLength + encodedField9.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: Request.Type.codec() }, + '2': { name: 'connect', codec: ConnectRequest.codec(), optional: true }, + '3': { name: 'streamOpen', codec: StreamOpenRequest.codec(), optional: true }, + '4': { name: 'streamHandler', codec: StreamHandlerRequest.codec(), optional: true }, + '5': { name: 'dht', codec: DHTRequest.codec(), optional: true }, + '6': { name: 'connManager', codec: ConnManagerRequest.codec(), optional: true }, + '7': { name: 'disconnect', codec: DisconnectRequest.codec(), optional: true }, + '8': { name: 'pubsub', codec: PSRequest.codec(), optional: true }, + '9': { name: 'peerStore', codec: PeerstoreRequest.codec(), optional: true } + }) } return _codec @@ -103,7 +202,7 @@ export namespace Response { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -111,16 +210,108 @@ export namespace Response { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: Response.Type.codec() }, - { id: 2, name: 'error', codec: ErrorResponse.codec(), optional: true }, - { id: 3, name: 'streamInfo', codec: StreamInfo.codec(), optional: true }, - { id: 4, name: 'identify', codec: IdentifyResponse.codec(), optional: true }, - { id: 5, name: 'dht', codec: DHTResponse.codec(), optional: true }, - { id: 6, name: 'peers', codec: PeerInfo.codec(), repeats: true }, - { id: 7, name: 'pubsub', codec: PSResponse.codec(), optional: true }, - { id: 8, name: 'peerStore', codec: PeerstoreResponse.codec(), 
optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = Response.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $error = obj.error + if ($error != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = ErrorResponse.codec().encode($error) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $streamInfo = obj.streamInfo + if ($streamInfo != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = StreamInfo.codec().encode($streamInfo) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $identify = obj.identify + if ($identify != null) { + const prefixField4 = Uint8Array.from([34]) + const encodedField4 = IdentifyResponse.codec().encode($identify) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + const $dht = obj.dht + if ($dht != null) { + const prefixField5 = Uint8Array.from([42]) + const encodedField5 = DHTResponse.codec().encode($dht) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + const $peers = obj.peers + if ($peers != null) { + for (const value of $peers) { + const prefixField6 = Uint8Array.from([50]) + const encodedField6 = PeerInfo.codec().encode(value) + bufs.push(prefixField6, ...encodedField6.bufs) + length += prefixField6.byteLength + encodedField6.length + } + } + + const $pubsub = obj.pubsub + if ($pubsub != null) { + const prefixField7 = Uint8Array.from([58]) + const encodedField7 = PSResponse.codec().encode($pubsub) + bufs.push(prefixField7, ...encodedField7.bufs) + length += prefixField7.byteLength + encodedField7.length + } + + const $peerStore = obj.peerStore + if ($peerStore != null) { + const prefixField8 = Uint8Array.from([66]) + const encodedField8 = PeerstoreResponse.codec().encode($peerStore) + bufs.push(prefixField8, ...encodedField8.bufs) + length += prefixField8.byteLength + encodedField8.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: Response.Type.codec() }, + '2': { name: 'error', codec: ErrorResponse.codec(), optional: true }, + '3': { name: 'streamInfo', codec: StreamInfo.codec(), optional: true }, + '4': { name: 'identify', codec: IdentifyResponse.codec(), optional: true }, + '5': { name: 'dht', codec: DHTResponse.codec(), optional: true }, + '6': { name: 'peers', codec: PeerInfo.codec(), repeats: true }, + '7': { name: 'pubsub', codec: PSResponse.codec(), optional: true }, + '8': { name: 'peerStore', codec: PeerstoreResponse.codec(), optional: true } + }) } return _codec @@ -145,10 +336,54 @@ export namespace IdentifyResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'id', codec: bytes }, - { id: 2, name: 'addrs', codec: bytes, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: 
Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $id = obj.id + if ($id != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($id) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $addrs = obj.addrs + if ($addrs != null) { + for (const value of $addrs) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'id', codec: bytes }, + '2': { name: 'addrs', codec: bytes, repeats: true } + }) } return _codec @@ -174,11 +409,63 @@ export namespace ConnectRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'peer', codec: bytes }, - { id: 2, name: 'addrs', codec: bytes, repeats: true }, - { id: 3, name: 'timeout', codec: int64, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $peer = obj.peer + if ($peer != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($peer) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $addrs = obj.addrs + if ($addrs != null) { + for (const value of $addrs) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + const $timeout = obj.timeout + if ($timeout != null) { + const prefixField3 = Uint8Array.from([24]) + const encodedField3 = int64.encode($timeout) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'peer', codec: bytes }, + '2': { name: 'addrs', codec: bytes, repeats: true }, + '3': { name: 'timeout', codec: int64, optional: true } + }) } return _codec @@ -204,11 +491,63 @@ export namespace StreamOpenRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'peer', codec: bytes }, - { id: 2, name: 'proto', codec: string, repeats: true }, - { id: 3, name: 'timeout', codec: int64, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $peer = obj.peer + if ($peer != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($peer) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $proto = obj.proto + if ($proto != null) { + for (const value of $proto) { + const prefixField2 = 
Uint8Array.from([18]) + const encodedField2 = string.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + const $timeout = obj.timeout + if ($timeout != null) { + const prefixField3 = Uint8Array.from([24]) + const encodedField3 = int64.encode($timeout) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'peer', codec: bytes }, + '2': { name: 'proto', codec: string, repeats: true }, + '3': { name: 'timeout', codec: int64, optional: true } + }) } return _codec @@ -233,10 +572,54 @@ export namespace StreamHandlerRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'addr', codec: bytes }, - { id: 2, name: 'proto', codec: string, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $addr = obj.addr + if ($addr != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($addr) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $proto = obj.proto + if ($proto != null) { + for (const value of $proto) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = string.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'addr', codec: bytes }, + '2': { name: 'proto', codec: string, repeats: true } + }) } return _codec @@ -260,9 +643,43 @@ export namespace ErrorResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'msg', codec: string } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $msg = obj.msg + if ($msg != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode($msg) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'msg', codec: string } + }) } return _codec @@ -288,11 +705,61 @@ export namespace StreamInfo { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'peer', codec: bytes }, - { id: 2, name: 'addr', codec: bytes }, - { id: 3, name: 'proto', codec: string } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $peer = obj.peer + if ($peer != null) { + 
const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($peer) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $addr = obj.addr + if ($addr != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($addr) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $proto = obj.proto + if ($proto != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = string.encode($proto) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'peer', codec: bytes }, + '2': { name: 'addr', codec: bytes }, + '3': { name: 'proto', codec: string } + }) } return _codec @@ -344,7 +811,7 @@ export namespace DHTRequest { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -352,15 +819,97 @@ export namespace DHTRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: DHTRequest.Type.codec() }, - { id: 2, name: 'peer', codec: bytes, optional: true }, - { id: 3, name: 'cid', codec: bytes, optional: true }, - { id: 4, name: 'key', codec: bytes, optional: true }, - { id: 5, name: 'value', codec: bytes, optional: true }, - { id: 6, name: 'count', codec: int32, optional: true }, - { id: 7, name: 'timeout', codec: int64, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = DHTRequest.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $peer = obj.peer + if ($peer != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($peer) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $cid = obj.cid + if ($cid != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($cid) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $key = obj.key + if ($key != null) { + const prefixField4 = Uint8Array.from([34]) + const encodedField4 = bytes.encode($key) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + const $value = obj.value + if ($value != null) { + const prefixField5 = Uint8Array.from([42]) + const encodedField5 = bytes.encode($value) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + const $count = obj.count + if ($count != null) { + const prefixField6 = Uint8Array.from([48]) + const encodedField6 = int32.encode($count) + bufs.push(prefixField6, ...encodedField6.bufs) + length += prefixField6.byteLength + encodedField6.length + } + + const $timeout = obj.timeout + if ($timeout != null) { 
+ const prefixField7 = Uint8Array.from([56]) + const encodedField7 = int64.encode($timeout) + bufs.push(prefixField7, ...encodedField7.bufs) + length += prefixField7.byteLength + encodedField7.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: DHTRequest.Type.codec() }, + '2': { name: 'peer', codec: bytes, optional: true }, + '3': { name: 'cid', codec: bytes, optional: true }, + '4': { name: 'key', codec: bytes, optional: true }, + '5': { name: 'value', codec: bytes, optional: true }, + '6': { name: 'count', codec: int32, optional: true }, + '7': { name: 'timeout', codec: int64, optional: true } + }) } return _codec @@ -396,7 +945,7 @@ export namespace DHTResponse { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -404,11 +953,61 @@ export namespace DHTResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: DHTResponse.Type.codec() }, - { id: 2, name: 'peer', codec: PeerInfo.codec(), optional: true }, - { id: 3, name: 'value', codec: bytes, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = DHTResponse.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $peer = obj.peer + if ($peer != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = PeerInfo.codec().encode($peer) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $value = obj.value + if ($value != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($value) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: DHTResponse.Type.codec() }, + '2': { name: 'peer', codec: PeerInfo.codec(), optional: true }, + '3': { name: 'value', codec: bytes, optional: true } + }) } return _codec @@ -433,10 +1032,54 @@ export namespace PeerInfo { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'id', codec: bytes }, - { id: 2, name: 'addrs', codec: bytes, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $id = obj.id + if ($id != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($id) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $addrs = obj.addrs + if ($addrs != null) { + for (const value of $addrs) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = 
bytes.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'id', codec: bytes }, + '2': { name: 'addrs', codec: bytes, repeats: true } + }) } return _codec @@ -473,7 +1116,7 @@ export namespace ConnManagerRequest { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -481,12 +1124,70 @@ export namespace ConnManagerRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: ConnManagerRequest.Type.codec() }, - { id: 2, name: 'peer', codec: bytes, optional: true }, - { id: 3, name: 'tag', codec: string, optional: true }, - { id: 4, name: 'weight', codec: int64, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = ConnManagerRequest.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $peer = obj.peer + if ($peer != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($peer) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $tag = obj.tag + if ($tag != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = string.encode($tag) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $weight = obj.weight + if ($weight != null) { + const prefixField4 = Uint8Array.from([32]) + const encodedField4 = int64.encode($weight) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: ConnManagerRequest.Type.codec() }, + '2': { name: 'peer', codec: bytes, optional: true }, + '3': { name: 'tag', codec: string, optional: true }, + '4': { name: 'weight', codec: int64, optional: true } + }) } return _codec @@ -510,9 +1211,43 @@ export namespace DisconnectRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'peer', codec: bytes } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $peer = obj.peer + if ($peer != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($peer) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + 
length + } + }, { + '1': { name: 'peer', codec: bytes } + }) } return _codec @@ -550,7 +1285,7 @@ export namespace PSRequest { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -558,11 +1293,61 @@ export namespace PSRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: PSRequest.Type.codec() }, - { id: 2, name: 'topic', codec: string, optional: true }, - { id: 3, name: 'data', codec: bytes, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = PSRequest.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $topic = obj.topic + if ($topic != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = string.encode($topic) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $data = obj.data + if ($data != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($data) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: PSRequest.Type.codec() }, + '2': { name: 'topic', codec: string, optional: true }, + '3': { name: 'data', codec: bytes, optional: true } + }) } return _codec @@ -591,14 +1376,90 @@ export namespace PSMessage { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'from', codec: bytes, optional: true }, - { id: 2, name: 'data', codec: bytes, optional: true }, - { id: 3, name: 'seqno', codec: bytes, optional: true }, - { id: 4, name: 'topicIDs', codec: string, repeats: true }, - { id: 5, name: 'signature', codec: bytes, optional: true }, - { id: 6, name: 'key', codec: bytes, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $from = obj.from + if ($from != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($from) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $data = obj.data + if ($data != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($data) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $seqno = obj.seqno + if ($seqno != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($seqno) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $topicIDs = obj.topicIDs + if ($topicIDs != null) { + for (const value of $topicIDs) { + const prefixField4 = Uint8Array.from([34]) + const encodedField4 = 
string.encode(value) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + } + + const $signature = obj.signature + if ($signature != null) { + const prefixField5 = Uint8Array.from([42]) + const encodedField5 = bytes.encode($signature) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + const $key = obj.key + if ($key != null) { + const prefixField6 = Uint8Array.from([50]) + const encodedField6 = bytes.encode($key) + bufs.push(prefixField6, ...encodedField6.bufs) + length += prefixField6.byteLength + encodedField6.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'from', codec: bytes, optional: true }, + '2': { name: 'data', codec: bytes, optional: true }, + '3': { name: 'seqno', codec: bytes, optional: true }, + '4': { name: 'topicIDs', codec: string, repeats: true }, + '5': { name: 'signature', codec: bytes, optional: true }, + '6': { name: 'key', codec: bytes, optional: true } + }) } return _codec @@ -623,10 +1484,56 @@ export namespace PSResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'topics', codec: string, repeats: true }, - { id: 2, name: 'peerIDs', codec: bytes, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $topics = obj.topics + if ($topics != null) { + for (const value of $topics) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode(value) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + } + + const $peerIDs = obj.peerIDs + if ($peerIDs != null) { + for (const value of $peerIDs) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'topics', codec: string, repeats: true }, + '2': { name: 'peerIDs', codec: bytes, repeats: true } + }) } return _codec @@ -660,7 +1567,7 @@ export namespace PeerstoreRequest { export namespace Type { export const codec = () => { - return enumeration(__TypeValues) + return enumeration(__TypeValues) } } @@ -668,11 +1575,63 @@ export namespace PeerstoreRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: PeerstoreRequest.Type.codec() }, - { id: 2, name: 'id', codec: bytes, optional: true }, - { id: 3, name: 'protos', codec: string, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = PeerstoreRequest.Type.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += 
prefixField1.byteLength + encodedField1.length + } + + const $id = obj.id + if ($id != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($id) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $protos = obj.protos + if ($protos != null) { + for (const value of $protos) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = string.encode(value) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: PeerstoreRequest.Type.codec() }, + '2': { name: 'id', codec: bytes, optional: true }, + '3': { name: 'protos', codec: string, repeats: true } + }) } return _codec @@ -697,10 +1656,54 @@ export namespace PeerstoreResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'peer', codec: PeerInfo.codec(), optional: true }, - { id: 2, name: 'protos', codec: string, repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $peer = obj.peer + if ($peer != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = PeerInfo.codec().encode($peer) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $protos = obj.protos + if ($protos != null) { + for (const value of $protos) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = string.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'peer', codec: PeerInfo.codec(), optional: true }, + '2': { name: 'protos', codec: string, repeats: true } + }) } return _codec diff --git a/packages/protons/test/fixtures/dht.ts b/packages/protons/test/fixtures/dht.ts index 08d949f..2732830 100644 --- a/packages/protons/test/fixtures/dht.ts +++ b/packages/protons/test/fixtures/dht.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { encodeMessage, decodeMessage, message, bytes, string, enumeration, int32 } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export interface Record { key?: Uint8Array @@ -18,13 +19,79 @@ export namespace Record { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'key', codec: bytes, optional: true }, - { id: 2, name: 'value', codec: bytes, optional: true }, - { id: 3, name: 'author', codec: bytes, optional: true }, - { id: 4, name: 'signature', codec: bytes, optional: true }, - { id: 5, name: 'timeReceived', codec: string, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold 
length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $key = obj.key + if ($key != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($key) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $value = obj.value + if ($value != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $author = obj.author + if ($author != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($author) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $signature = obj.signature + if ($signature != null) { + const prefixField4 = Uint8Array.from([34]) + const encodedField4 = bytes.encode($signature) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + const $timeReceived = obj.timeReceived + if ($timeReceived != null) { + const prefixField5 = Uint8Array.from([42]) + const encodedField5 = string.encode($timeReceived) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'key', codec: bytes, optional: true }, + '2': { name: 'value', codec: bytes, optional: true }, + '3': { name: 'author', codec: bytes, optional: true }, + '4': { name: 'signature', codec: bytes, optional: true }, + '5': { name: 'timeReceived', codec: string, optional: true } + }) } return _codec @@ -69,7 +136,7 @@ export namespace Message { export namespace MessageType { export const codec = () => { - return enumeration(__MessageTypeValues) + return enumeration(__MessageTypeValues) } } @@ -89,7 +156,7 @@ export namespace Message { export namespace ConnectionType { export const codec = () => { - return enumeration(__ConnectionTypeValues) + return enumeration(__ConnectionTypeValues) } } @@ -104,11 +171,63 @@ export namespace Message { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'id', codec: bytes, optional: true }, - { id: 2, name: 'addrs', codec: bytes, repeats: true }, - { id: 3, name: 'connection', codec: Message.ConnectionType.codec(), optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $id = obj.id + if ($id != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($id) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $addrs = obj.addrs + if ($addrs != null) { + for (const value of $addrs) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + const $connection = obj.connection + if ($connection != null) { + const prefixField3 = Uint8Array.from([24]) + const encodedField3 = 
Message.ConnectionType.codec().encode($connection) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'id', codec: bytes, optional: true }, + '2': { name: 'addrs', codec: bytes, repeats: true }, + '3': { name: 'connection', codec: Message.ConnectionType.codec(), optional: true } + }) } return _codec @@ -127,14 +246,92 @@ export namespace Message { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'type', codec: Message.MessageType.codec(), optional: true }, - { id: 10, name: 'clusterLevelRaw', codec: int32, optional: true }, - { id: 2, name: 'key', codec: bytes, optional: true }, - { id: 3, name: 'record', codec: bytes, optional: true }, - { id: 8, name: 'closerPeers', codec: Message.Peer.codec(), repeats: true }, - { id: 9, name: 'providerPeers', codec: Message.Peer.codec(), repeats: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $type = obj.type + if ($type != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = Message.MessageType.codec().encode($type) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $clusterLevelRaw = obj.clusterLevelRaw + if ($clusterLevelRaw != null) { + const prefixField10 = Uint8Array.from([80]) + const encodedField10 = int32.encode($clusterLevelRaw) + bufs.push(prefixField10, ...encodedField10.bufs) + length += prefixField10.byteLength + encodedField10.length + } + + const $key = obj.key + if ($key != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($key) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $record = obj.record + if ($record != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($record) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $closerPeers = obj.closerPeers + if ($closerPeers != null) { + for (const value of $closerPeers) { + const prefixField8 = Uint8Array.from([66]) + const encodedField8 = Message.Peer.codec().encode(value) + bufs.push(prefixField8, ...encodedField8.bufs) + length += prefixField8.byteLength + encodedField8.length + } + } + + const $providerPeers = obj.providerPeers + if ($providerPeers != null) { + for (const value of $providerPeers) { + const prefixField9 = Uint8Array.from([74]) + const encodedField9 = Message.Peer.codec().encode(value) + bufs.push(prefixField9, ...encodedField9.bufs) + length += prefixField9.byteLength + encodedField9.length + } + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'type', codec: Message.MessageType.codec(), optional: true }, + '10': { name: 'clusterLevelRaw', codec: int32, optional: true }, + '2': { name: 'key', codec: bytes, optional: true }, + '3': { name: 'record', codec: bytes, optional: true }, 
+ '8': { name: 'closerPeers', codec: Message.Peer.codec(), repeats: true }, + '9': { name: 'providerPeers', codec: Message.Peer.codec(), repeats: true } + }) } return _codec diff --git a/packages/protons/test/fixtures/noise.ts b/packages/protons/test/fixtures/noise.ts index dc6b225..0c3fc0d 100644 --- a/packages/protons/test/fixtures/noise.ts +++ b/packages/protons/test/fixtures/noise.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { encodeMessage, decodeMessage, message, bytes } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export namespace pb { export interface NoiseHandshakePayload { @@ -17,11 +18,61 @@ export namespace pb { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'identityKey', codec: bytes }, - { id: 2, name: 'identitySig', codec: bytes }, - { id: 3, name: 'data', codec: bytes } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $identityKey = obj.identityKey + if ($identityKey != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($identityKey) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $identitySig = obj.identitySig + if ($identitySig != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($identitySig) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $data = obj.data + if ($data != null) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = bytes.encode($data) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'identityKey', codec: bytes }, + '2': { name: 'identitySig', codec: bytes }, + '3': { name: 'data', codec: bytes } + }) } return _codec diff --git a/packages/protons/test/fixtures/peer.ts b/packages/protons/test/fixtures/peer.ts index 39f55cd..a80a6c6 100644 --- a/packages/protons/test/fixtures/peer.ts +++ b/packages/protons/test/fixtures/peer.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { encodeMessage, decodeMessage, message, string, bytes, bool } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export interface Peer { addresses: Address[] @@ -18,13 +19,85 @@ export namespace Peer { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'addresses', codec: Address.codec(), repeats: true }, - { id: 2, name: 'protocols', codec: string, repeats: true }, - { id: 3, name: 'metadata', codec: Metadata.codec(), repeats: true }, - { id: 4, name: 'pubKey', codec: bytes, optional: true }, - { id: 5, name: 'peerRecordEnvelope', codec: bytes, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: 
Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $addresses = obj.addresses + if ($addresses != null) { + for (const value of $addresses) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = Address.codec().encode(value) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + } + + const $protocols = obj.protocols + if ($protocols != null) { + for (const value of $protocols) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = string.encode(value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + } + + const $metadata = obj.metadata + if ($metadata != null) { + for (const value of $metadata) { + const prefixField3 = Uint8Array.from([26]) + const encodedField3 = Metadata.codec().encode(value) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + } + + const $pubKey = obj.pubKey + if ($pubKey != null) { + const prefixField4 = Uint8Array.from([34]) + const encodedField4 = bytes.encode($pubKey) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + const $peerRecordEnvelope = obj.peerRecordEnvelope + if ($peerRecordEnvelope != null) { + const prefixField5 = Uint8Array.from([42]) + const encodedField5 = bytes.encode($peerRecordEnvelope) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'addresses', codec: Address.codec(), repeats: true }, + '2': { name: 'protocols', codec: string, repeats: true }, + '3': { name: 'metadata', codec: Metadata.codec(), repeats: true }, + '4': { name: 'pubKey', codec: bytes, optional: true }, + '5': { name: 'peerRecordEnvelope', codec: bytes, optional: true } + }) } return _codec @@ -49,10 +122,52 @@ export namespace Address { export const codec = (): Codec
=> { if (_codec == null) { - _codec = message
([ - { id: 1, name: 'multiaddr', codec: bytes }, - { id: 2, name: 'isCertified', codec: bool, optional: true } - ]) + _codec = message
((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $multiaddr = obj.multiaddr + if ($multiaddr != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = bytes.encode($multiaddr) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $isCertified = obj.isCertified + if ($isCertified != null) { + const prefixField2 = Uint8Array.from([16]) + const encodedField2 = bool.encode($isCertified) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'multiaddr', codec: bytes }, + '2': { name: 'isCertified', codec: bool, optional: true } + }) } return _codec @@ -77,10 +192,52 @@ export namespace Metadata { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'key', codec: string }, - { id: 2, name: 'value', codec: bytes } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $key = obj.key + if ($key != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode($key) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $value = obj.value + if ($value != null) { + const prefixField2 = Uint8Array.from([18]) + const encodedField2 = bytes.encode($value) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'key', codec: string }, + '2': { name: 'value', codec: bytes } + }) } return _codec diff --git a/packages/protons/test/fixtures/test.ts b/packages/protons/test/fixtures/test.ts index c7e5918..b90dabb 100644 --- a/packages/protons/test/fixtures/test.ts +++ b/packages/protons/test/fixtures/test.ts @@ -2,8 +2,9 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { enumeration, encodeMessage, decodeMessage, message, string, bool, int32, int64, uint32, uint64, sint32, sint64, double, float, bytes, fixed32, fixed64, sfixed32, sfixed64 } from 'protons-runtime' -import type { Codec } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' +import { unsigned } from 'uint8-varint' +import type { Codec } from 'protons-runtime' export enum AnEnum { HERP = 'HERP', @@ -17,7 +18,7 @@ enum __AnEnumValues { export namespace AnEnum { export const codec = () => { - return enumeration(__AnEnumValues) + return enumeration(__AnEnumValues) } } export interface SubMessage { @@ -29,9 +30,43 @@ export namespace SubMessage { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'foo', codec: string } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) 
+ } + + let length = 0 + + const $foo = obj.foo + if ($foo != null) { + const prefixField1 = Uint8Array.from([10]) + const encodedField1 = string.encode($foo) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'foo', codec: string } + }) } return _codec @@ -72,26 +107,198 @@ export namespace AllTheTypes { export const codec = (): Codec => { if (_codec == null) { - _codec = message([ - { id: 1, name: 'field1', codec: bool, optional: true }, - { id: 2, name: 'field2', codec: int32, optional: true }, - { id: 3, name: 'field3', codec: int64, optional: true }, - { id: 4, name: 'field4', codec: uint32, optional: true }, - { id: 5, name: 'field5', codec: uint64, optional: true }, - { id: 6, name: 'field6', codec: sint32, optional: true }, - { id: 7, name: 'field7', codec: sint64, optional: true }, - { id: 8, name: 'field8', codec: double, optional: true }, - { id: 9, name: 'field9', codec: float, optional: true }, - { id: 10, name: 'field10', codec: string, optional: true }, - { id: 11, name: 'field11', codec: bytes, optional: true }, - { id: 12, name: 'field12', codec: AnEnum.codec(), optional: true }, - { id: 13, name: 'field13', codec: SubMessage.codec(), optional: true }, - { id: 14, name: 'field14', codec: string, repeats: true }, - { id: 15, name: 'field15', codec: fixed32, optional: true }, - { id: 16, name: 'field16', codec: fixed64, optional: true }, - { id: 17, name: 'field17', codec: sfixed32, optional: true }, - { id: 18, name: 'field18', codec: sfixed64, optional: true } - ]) + _codec = message((obj, opts = {}) => { + const bufs: Uint8Array[] = [] + + if (opts.lengthDelimited !== false) { + // will hold length prefix + bufs.push(new Uint8Array(0)) + } + + let length = 0 + + const $field1 = obj.field1 + if ($field1 != null) { + const prefixField1 = Uint8Array.from([8]) + const encodedField1 = bool.encode($field1) + bufs.push(prefixField1, ...encodedField1.bufs) + length += prefixField1.byteLength + encodedField1.length + } + + const $field2 = obj.field2 + if ($field2 != null) { + const prefixField2 = Uint8Array.from([16]) + const encodedField2 = int32.encode($field2) + bufs.push(prefixField2, ...encodedField2.bufs) + length += prefixField2.byteLength + encodedField2.length + } + + const $field3 = obj.field3 + if ($field3 != null) { + const prefixField3 = Uint8Array.from([24]) + const encodedField3 = int64.encode($field3) + bufs.push(prefixField3, ...encodedField3.bufs) + length += prefixField3.byteLength + encodedField3.length + } + + const $field4 = obj.field4 + if ($field4 != null) { + const prefixField4 = Uint8Array.from([32]) + const encodedField4 = uint32.encode($field4) + bufs.push(prefixField4, ...encodedField4.bufs) + length += prefixField4.byteLength + encodedField4.length + } + + const $field5 = obj.field5 + if ($field5 != null) { + const prefixField5 = Uint8Array.from([40]) + const encodedField5 = uint64.encode($field5) + bufs.push(prefixField5, ...encodedField5.bufs) + length += prefixField5.byteLength + encodedField5.length + } + + const $field6 = obj.field6 + if ($field6 != null) { + const prefixField6 = Uint8Array.from([48]) + const encodedField6 = sint32.encode($field6) + bufs.push(prefixField6, ...encodedField6.bufs) + length += prefixField6.byteLength + encodedField6.length + } + + 
const $field7 = obj.field7 + if ($field7 != null) { + const prefixField7 = Uint8Array.from([56]) + const encodedField7 = sint64.encode($field7) + bufs.push(prefixField7, ...encodedField7.bufs) + length += prefixField7.byteLength + encodedField7.length + } + + const $field8 = obj.field8 + if ($field8 != null) { + const prefixField8 = Uint8Array.from([65]) + const encodedField8 = double.encode($field8) + bufs.push(prefixField8, ...encodedField8.bufs) + length += prefixField8.byteLength + encodedField8.length + } + + const $field9 = obj.field9 + if ($field9 != null) { + const prefixField9 = Uint8Array.from([77]) + const encodedField9 = float.encode($field9) + bufs.push(prefixField9, ...encodedField9.bufs) + length += prefixField9.byteLength + encodedField9.length + } + + const $field10 = obj.field10 + if ($field10 != null) { + const prefixField10 = Uint8Array.from([82]) + const encodedField10 = string.encode($field10) + bufs.push(prefixField10, ...encodedField10.bufs) + length += prefixField10.byteLength + encodedField10.length + } + + const $field11 = obj.field11 + if ($field11 != null) { + const prefixField11 = Uint8Array.from([90]) + const encodedField11 = bytes.encode($field11) + bufs.push(prefixField11, ...encodedField11.bufs) + length += prefixField11.byteLength + encodedField11.length + } + + const $field12 = obj.field12 + if ($field12 != null) { + const prefixField12 = Uint8Array.from([96]) + const encodedField12 = AnEnum.codec().encode($field12) + bufs.push(prefixField12, ...encodedField12.bufs) + length += prefixField12.byteLength + encodedField12.length + } + + const $field13 = obj.field13 + if ($field13 != null) { + const prefixField13 = Uint8Array.from([106]) + const encodedField13 = SubMessage.codec().encode($field13) + bufs.push(prefixField13, ...encodedField13.bufs) + length += prefixField13.byteLength + encodedField13.length + } + + const $field14 = obj.field14 + if ($field14 != null) { + for (const value of $field14) { + const prefixField14 = Uint8Array.from([114]) + const encodedField14 = string.encode(value) + bufs.push(prefixField14, ...encodedField14.bufs) + length += prefixField14.byteLength + encodedField14.length + } + } + + const $field15 = obj.field15 + if ($field15 != null) { + const prefixField15 = Uint8Array.from([125]) + const encodedField15 = fixed32.encode($field15) + bufs.push(prefixField15, ...encodedField15.bufs) + length += prefixField15.byteLength + encodedField15.length + } + + const $field16 = obj.field16 + if ($field16 != null) { + const prefixField16 = Uint8Array.from([129, 1]) + const encodedField16 = fixed64.encode($field16) + bufs.push(prefixField16, ...encodedField16.bufs) + length += prefixField16.byteLength + encodedField16.length + } + + const $field17 = obj.field17 + if ($field17 != null) { + const prefixField17 = Uint8Array.from([141, 1]) + const encodedField17 = sfixed32.encode($field17) + bufs.push(prefixField17, ...encodedField17.bufs) + length += prefixField17.byteLength + encodedField17.length + } + + const $field18 = obj.field18 + if ($field18 != null) { + const prefixField18 = Uint8Array.from([145, 1]) + const encodedField18 = sfixed64.encode($field18) + bufs.push(prefixField18, ...encodedField18.bufs) + length += prefixField18.byteLength + encodedField18.length + } + + if (opts.lengthDelimited !== false) { + const prefix = unsigned.encode(length) + + bufs[0] = prefix + length += prefix.byteLength + + return { + bufs, + length + } + } + + return { + bufs, + length + } + }, { + '1': { name: 'field1', codec: bool, optional: true }, + 
'2': { name: 'field2', codec: int32, optional: true }, + '3': { name: 'field3', codec: int64, optional: true }, + '4': { name: 'field4', codec: uint32, optional: true }, + '5': { name: 'field5', codec: uint64, optional: true }, + '6': { name: 'field6', codec: sint32, optional: true }, + '7': { name: 'field7', codec: sint64, optional: true }, + '8': { name: 'field8', codec: double, optional: true }, + '9': { name: 'field9', codec: float, optional: true }, + '10': { name: 'field10', codec: string, optional: true }, + '11': { name: 'field11', codec: bytes, optional: true }, + '12': { name: 'field12', codec: AnEnum.codec(), optional: true }, + '13': { name: 'field13', codec: SubMessage.codec(), optional: true }, + '14': { name: 'field14', codec: string, repeats: true }, + '15': { name: 'field15', codec: fixed32, optional: true }, + '16': { name: 'field16', codec: fixed64, optional: true }, + '17': { name: 'field17', codec: sfixed32, optional: true }, + '18': { name: 'field18', codec: sfixed64, optional: true } + }) } return _codec diff --git a/packages/protons/tsconfig.json b/packages/protons/tsconfig.json index cac17d8..fbb3170 100644 --- a/packages/protons/tsconfig.json +++ b/packages/protons/tsconfig.json @@ -8,6 +8,9 @@ "src", "test" ], + "exclude": [ + "test/fixtures/*.pbjs.ts" + ], "references": [ { "path": "../protons-runtime" From 40734009775f7b1f77ac90c1ca80c33e02448495 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Sun, 7 Aug 2022 15:24:52 +0100 Subject: [PATCH 09/14] feat: use protobufjs internals --- lerna.json | 4 +- packages/protons-benchmark/package.json | 2 +- packages/protons-benchmark/src/bench.proto | 1 + packages/protons-benchmark/src/decode.ts | 50 + packages/protons-benchmark/src/encode.ts | 48 + .../protons-benchmark/src/protons/bench.ts | 334 ++- packages/protons-runtime/package.json | 6 +- packages/protons-runtime/src/codec.ts | 16 +- packages/protons-runtime/src/codecs/bool.ts | 20 - packages/protons-runtime/src/codecs/bytes.ts | 33 - packages/protons-runtime/src/codecs/double.ts | 27 - packages/protons-runtime/src/codecs/enum.ts | 36 +- .../protons-runtime/src/codecs/fixed32.ts | 27 - .../protons-runtime/src/codecs/fixed64.ts | 27 - packages/protons-runtime/src/codecs/float.ts | 27 - packages/protons-runtime/src/codecs/int32.ts | 34 - packages/protons-runtime/src/codecs/int64.ts | 34 - .../protons-runtime/src/codecs/message.ts | 231 +-- .../protons-runtime/src/codecs/sfixed32.ts | 27 - .../protons-runtime/src/codecs/sfixed64.ts | 27 - packages/protons-runtime/src/codecs/sint32.ts | 29 - packages/protons-runtime/src/codecs/sint64.ts | 25 - packages/protons-runtime/src/codecs/string.ts | 35 - packages/protons-runtime/src/codecs/uint32.ts | 25 - packages/protons-runtime/src/codecs/uint64.ts | 25 - packages/protons-runtime/src/decode.ts | 27 +- packages/protons-runtime/src/encode.ts | 26 +- packages/protons-runtime/src/index.ts | 220 +- packages/protons-runtime/src/utils/utf8.ts | 123 -- packages/protons/package.json | 3 +- packages/protons/src/index.ts | 264 ++- packages/protons/test/fixtures/basic.ts | 68 +- packages/protons/test/fixtures/circuit.ts | 172 +- packages/protons/test/fixtures/daemon.ts | 1803 +++++++++-------- packages/protons/test/fixtures/dht.ts | 328 +-- packages/protons/test/fixtures/noise.ts | 95 +- packages/protons/test/fixtures/peer.ts | 282 +-- packages/protons/test/fixtures/test.ts | 337 ++- packages/protons/test/index.spec.ts | 7 +- packages/protons/tsconfig.json | 3 +- 40 files changed, 2294 insertions(+), 2614 deletions(-) create mode 100644 
packages/protons-benchmark/src/decode.ts create mode 100644 packages/protons-benchmark/src/encode.ts delete mode 100644 packages/protons-runtime/src/codecs/bool.ts delete mode 100644 packages/protons-runtime/src/codecs/bytes.ts delete mode 100644 packages/protons-runtime/src/codecs/double.ts delete mode 100644 packages/protons-runtime/src/codecs/fixed32.ts delete mode 100644 packages/protons-runtime/src/codecs/fixed64.ts delete mode 100644 packages/protons-runtime/src/codecs/float.ts delete mode 100644 packages/protons-runtime/src/codecs/int32.ts delete mode 100644 packages/protons-runtime/src/codecs/int64.ts delete mode 100644 packages/protons-runtime/src/codecs/sfixed32.ts delete mode 100644 packages/protons-runtime/src/codecs/sfixed64.ts delete mode 100644 packages/protons-runtime/src/codecs/sint32.ts delete mode 100644 packages/protons-runtime/src/codecs/sint64.ts delete mode 100644 packages/protons-runtime/src/codecs/string.ts delete mode 100644 packages/protons-runtime/src/codecs/uint32.ts delete mode 100644 packages/protons-runtime/src/codecs/uint64.ts delete mode 100644 packages/protons-runtime/src/utils/utf8.ts diff --git a/lerna.json b/lerna.json index aa6d000..d7a2336 100644 --- a/lerna.json +++ b/lerna.json @@ -1,8 +1,6 @@ { "lerna": "4.0.0", - "packages": [ - "packages/*" - ], + "useWorkspaces": true, "version": "independent", "command": { "run": { diff --git a/packages/protons-benchmark/package.json b/packages/protons-benchmark/package.json index b3d3ac8..43c0ce3 100644 --- a/packages/protons-benchmark/package.json +++ b/packages/protons-benchmark/package.json @@ -72,7 +72,7 @@ "aegir": "^37.0.5", "benchmark": "^2.1.4", "pbjs": "^0.0.14", - "protobufjs": "^6.11.2", + "protobufjs": "^7.0.0", "protons": "^4.0.0", "protons-runtime": "^2.0.0" }, diff --git a/packages/protons-benchmark/src/bench.proto b/packages/protons-benchmark/src/bench.proto index c50b5a8..126ac8e 100755 --- a/packages/protons-benchmark/src/bench.proto +++ b/packages/protons-benchmark/src/bench.proto @@ -1,3 +1,4 @@ +syntax = "proto3"; message Foo { optional uint32 baz = 1; diff --git a/packages/protons-benchmark/src/decode.ts b/packages/protons-benchmark/src/decode.ts new file mode 100644 index 0000000..3d31254 --- /dev/null +++ b/packages/protons-benchmark/src/decode.ts @@ -0,0 +1,50 @@ +/* eslint-disable no-console */ + +/* +$ node dist/src/index.js +$ npx playwright-test dist/src/index.js --runner benchmark +*/ + +import Benchmark from 'benchmark' +import { Test as ProtonsTest } from './protons/bench.js' +import { decodeTest as pbjsDecodeTest } from './pbjs/bench.js' +import { Test as ProtobufjsTest } from './protobufjs/bench.js' + +const message = { + meh: { + lol: 'sdkljfoee', + b: { + tmp: { + baz: 2309292 + } + } + }, + hello: 3493822, + foo: 'derp derp derp', + payload: Uint8Array.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) +} + +const buf = ProtonsTest.encode(message).subarray() + +new Benchmark.Suite() + .add('pbjs', () => { + pbjsDecodeTest(buf) + }) + .add('protons', () => { + ProtonsTest.decode(buf) + }) + .add('protobufjs', () => { + ProtobufjsTest.decode(buf) + }) + .on('error', (err: Error) => { + console.error(err) + }) + .on('cycle', (event: any) => { + console.info(String(event.target)) + }) + .on('complete', function () { + // @ts-expect-error types are wrong + console.info(`Fastest is ${this.filter('fastest').map('name')}`) // eslint-disable-line @typescript-eslint/restrict-template-expressions + }) + // run async + .run({ async: true }) diff --git a/packages/protons-benchmark/src/encode.ts 
b/packages/protons-benchmark/src/encode.ts new file mode 100644 index 0000000..e911ccf --- /dev/null +++ b/packages/protons-benchmark/src/encode.ts @@ -0,0 +1,48 @@ +/* eslint-disable no-console */ + +/* +$ node dist/src/index.js +$ npx playwright-test dist/src/index.js --runner benchmark +*/ + +import Benchmark from 'benchmark' +import { Test as ProtonsTest } from './protons/bench.js' +import { encodeTest as pbjsEncodeTest } from './pbjs/bench.js' +import { Test as ProtobufjsTest } from './protobufjs/bench.js' + +const message = { + meh: { + lol: 'sdkljfoee', + b: { + tmp: { + baz: 2309292 + } + } + }, + hello: 3493822, + foo: 'derp derp derp', + payload: Uint8Array.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) +} + +new Benchmark.Suite() + .add('pbjs', () => { + pbjsEncodeTest(message) + }) + .add('protons', () => { + ProtonsTest.encode(message) + }) + .add('protobufjs', () => { + ProtobufjsTest.encode(message).finish() + }) + .on('error', (err: Error) => { + console.error(err) + }) + .on('cycle', (event: any) => { + console.info(String(event.target)) + }) + .on('complete', function () { + // @ts-expect-error types are wrong + console.info(`Fastest is ${this.filter('fastest').map('name')}`) // eslint-disable-line @typescript-eslint/restrict-template-expressions + }) + // run async + .run({ async: true }) diff --git a/packages/protons-benchmark/src/protons/bench.ts b/packages/protons-benchmark/src/protons/bench.ts index 2020a93..efcdf84 100644 --- a/packages/protons-benchmark/src/protons/bench.ts +++ b/packages/protons-benchmark/src/protons/bench.ts @@ -1,13 +1,12 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { encodeMessage, decodeMessage, message, uint32, enumeration, string, bytes } from 'protons-runtime' +import { encodeMessage, decodeMessage, message, enumeration } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export interface Foo { - baz: number + baz?: number } export namespace Foo { @@ -15,49 +14,45 @@ export namespace Foo { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $baz = obj.baz - if ($baz != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = uint32.encode($baz) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.baz != null) { + writer.uint32(8) + writer.uint32(obj.baz) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length - bufs[0] = prefix - length += prefix.byteLength + while (reader.pos < end) { + const tag = reader.uint32() - return { - bufs, - length + switch (tag >>> 3) { + case 1: + obj.baz = reader.uint32() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length - } - }, { - '1': { name: 'baz', codec: uint32 } + return obj }) } return _codec } - export const encode = (obj: Foo): Uint8ArrayList => { + export const encode = (obj: Foo): Uint8Array => { return encodeMessage(obj, Foo.codec()) } @@ -67,7 +62,7 @@ export namespace Foo { } export interface Bar { - tmp: Foo + tmp?: Foo } export namespace Bar { @@ -75,49 +70,45 @@ export namespace Bar { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $tmp = obj.tmp - if ($tmp != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = Foo.codec().encode($tmp) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.tmp != null) { + writer.uint32(10) + Foo.codec().encode(obj.tmp, writer) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} - bufs[0] = prefix - length += prefix.byteLength + const end = length == null ? reader.len : reader.pos + length - return { - bufs, - length + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.tmp = Foo.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length - } - }, { - '1': { name: 'tmp', codec: Foo.codec() } + return obj }) } return _codec } - export const encode = (obj: Bar): Uint8ArrayList => { + export const encode = (obj: Bar): Uint8Array => { return encodeMessage(obj, Bar.codec()) } @@ -150,51 +141,56 @@ export namespace Yo { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $lol = obj.lol - if ($lol != null) { - for (const value of $lol) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = FOO.codec().encode(value) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.lol != null) { + for (const value of obj.lol) { + writer.uint32(8) + FOO.codec().encode(value, writer) } + } else { + throw new Error('Protocol error: required field "lol" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.lol = obj.lol ?? [] + obj.lol.push(FOO.codec().decode(reader)) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + obj.lol = obj.lol ?? 
[] + + if (obj.lol == null) { + throw new Error('Protocol error: value for required field "lol" was not found in protobuf') } - }, { - '1': { name: 'lol', codec: FOO.codec(), repeats: true } + + return obj }) } return _codec } - export const encode = (obj: Yo): Uint8ArrayList => { + export const encode = (obj: Yo): Uint8Array => { return encodeMessage(obj, Yo.codec()) } @@ -204,7 +200,7 @@ export namespace Yo { } export interface Lol { - lol: string + lol?: string b: Bar } @@ -213,58 +209,59 @@ export namespace Lol { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $lol = obj.lol - if ($lol != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode($lol) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.lol != null) { + writer.uint32(10) + writer.string(obj.lol) } - const $b = obj.b - if ($b != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = Bar.codec().encode($b) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.b != null) { + writer.uint32(18) + Bar.codec().encode(obj.b, writer) + } else { + throw new Error('Protocol error: required field "b" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.lol = reader.string() + break + case 2: + obj.b = Bar.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.b == null) { + throw new Error('Protocol error: value for required field "b" was not found in protobuf') } - }, { - '1': { name: 'lol', codec: string }, - '2': { name: 'b', codec: Bar.codec() } + + return obj }) } return _codec } - export const encode = (obj: Lol): Uint8ArrayList => { + export const encode = (obj: Lol): Uint8Array => { return encodeMessage(obj, Lol.codec()) } @@ -274,10 +271,10 @@ export namespace Lol { } export interface Test { - meh: Lol - hello: number - foo: string - payload: Uint8Array + meh?: Lol + hello?: number + foo?: string + payload?: Uint8Array } export namespace Test { @@ -285,76 +282,69 @@ export namespace Test { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $meh = obj.meh - if ($meh != null) { - const prefixField6 = Uint8Array.from([50]) - const encodedField6 = Lol.codec().encode($meh) - bufs.push(prefixField6, ...encodedField6.bufs) - length += prefixField6.byteLength + encodedField6.length + if (obj.meh != null) { + writer.uint32(50) + Lol.codec().encode(obj.meh, writer) } - const $hello = obj.hello - if ($hello != null) { - const prefixField3 = Uint8Array.from([24]) - const encodedField3 = uint32.encode($hello) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.hello != null) { + writer.uint32(24) + writer.uint32(obj.hello) } - const $foo = obj.foo - if ($foo != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode($foo) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.foo != null) { + writer.uint32(10) + writer.string(obj.foo) } - const $payload = obj.payload - if ($payload != null) { - const prefixField7 = Uint8Array.from([58]) - const encodedField7 = bytes.encode($payload) - bufs.push(prefixField7, ...encodedField7.bufs) - length += prefixField7.byteLength + encodedField7.length + if (obj.payload != null) { + writer.uint32(58) + writer.bytes(obj.payload) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 6: + obj.meh = Lol.codec().decode(reader, reader.uint32()) + break + case 3: + obj.hello = reader.uint32() + break + case 1: + obj.foo = reader.string() + break + case 7: + obj.payload = reader.bytes() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length - } - }, { - '6': { name: 'meh', codec: Lol.codec() }, - '3': { name: 'hello', codec: uint32 }, - '1': { name: 'foo', codec: string }, - '7': { name: 'payload', codec: bytes } + return obj }) } return _codec } - export const encode = (obj: Test): Uint8ArrayList => { + export const encode = (obj: Test): Uint8Array => { return encodeMessage(obj, Test.codec()) } diff --git a/packages/protons-runtime/package.json b/packages/protons-runtime/package.json index 4606b3b..d3fdf63 100644 --- a/packages/protons-runtime/package.json +++ b/packages/protons-runtime/package.json @@ -148,11 +148,7 @@ "release": "aegir release" }, "dependencies": { - "byte-access": "^1.0.1", - "longbits": "^1.1.0", - "uint8-varint": "^1.0.2", - "uint8arraylist": "^2.3.2", - "uint8arrays": "^3.1.0" + "uint8arraylist": "^2.3.2" }, "devDependencies": { "aegir": "^37.0.5" diff --git a/packages/protons-runtime/src/codec.ts b/packages/protons-runtime/src/codec.ts index 23d1ccc..5405013 100644 --- a/packages/protons-runtime/src/codec.ts +++ b/packages/protons-runtime/src/codec.ts @@ -1,4 +1,4 @@ -import type { Uint8ArrayList } from 'uint8arraylist' +import type { Writer, Reader } from './index.js' // https://developers.google.com/protocol-buffers/docs/encoding#structure export enum CODEC_TYPES { @@ -14,22 +14,12 @@ export interface EncodeOptions { lengthDelimited?: boolean } -export interface EncodeResult { - bufs: Uint8Array[] - length: number -} - export interface EncodeFunction { - (value: T, opts?: EncodeOptions): EncodeResult -} - -export interface DecodeResult { - value: T - length: number + (value: T, writer: Writer, opts?: EncodeOptions): void } export interface DecodeFunction { - (buf: Uint8ArrayList, offset: number): DecodeResult + (reader: Reader, length?: number): T } export interface Codec { diff --git a/packages/protons-runtime/src/codecs/bool.ts b/packages/protons-runtime/src/codecs/bool.ts deleted file mode 100644 index 2a2d7a2..0000000 --- a/packages/protons-runtime/src/codecs/bool.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' - -const encode: EncodeFunction = function boolEncode (value) { - return { - bufs: [ - Uint8Array.from([value ? 
1 : 0]) - ], - length: 1 - } -} - -const decode: DecodeFunction = function boolDecode (buffer, offset) { - return { - value: buffer.get(offset) > 0, - length: 1 - } -} - -export const bool = createCodec('bool', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/bytes.ts b/packages/protons-runtime/src/codecs/bytes.ts deleted file mode 100644 index 8cf73b0..0000000 --- a/packages/protons-runtime/src/codecs/bytes.ts +++ /dev/null @@ -1,33 +0,0 @@ - -import { unsigned } from 'uint8-varint' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { allocUnsafe } from 'uint8arrays/alloc' - -const encode: EncodeFunction = function bytesEncode (val) { - const lenLen = unsigned.encodingLength(val.byteLength) - const buf = allocUnsafe(lenLen + val.byteLength) - unsigned.encode(val.byteLength, buf) - - buf.set(val, lenLen) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function bytesDecode (buf, offset) { - const byteLength = unsigned.decode(buf, offset) - const byteLengthLength = unsigned.encodingLength(byteLength) - offset += byteLengthLength - - return { - value: buf.subarray(offset, offset + byteLength), - length: byteLengthLength + byteLength - } -} - -export const bytes = createCodec('bytes', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) diff --git a/packages/protons-runtime/src/codecs/double.ts b/packages/protons-runtime/src/codecs/double.ts deleted file mode 100644 index 2bd3706..0000000 --- a/packages/protons-runtime/src/codecs/double.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -const ENCODING_LENGTH = 8 - -const encode: EncodeFunction = function doubleEncode (val) { - const buf = alloc(ENCODING_LENGTH) - const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - view.setFloat64(0, val, true) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function doubleDecode (buf, offset) { - return { - value: buf.getFloat64(offset, true), - length: ENCODING_LENGTH - } -} - -export const double = createCodec('double', CODEC_TYPES.BIT64, encode, decode) diff --git a/packages/protons-runtime/src/codecs/enum.ts b/packages/protons-runtime/src/codecs/enum.ts index 15f5a59..6c3ec16 100644 --- a/packages/protons-runtime/src/codecs/enum.ts +++ b/packages/protons-runtime/src/codecs/enum.ts @@ -1,50 +1,28 @@ -import { unsigned } from 'uint8-varint' import { createCodec, CODEC_TYPES } from '../codec.js' import type { DecodeFunction, EncodeFunction, Codec } from '../codec.js' -import { allocUnsafe } from 'uint8arrays/alloc' export function enumeration (v: any): Codec { function findValue (val: string | number): number { + // Use the reverse mapping to look up the enum key for the stored value + // https://www.typescriptlang.org/docs/handbook/enums.html#reverse-mappings if (v[val.toString()] == null) { throw new Error('Invalid enum value') } - if (typeof val === 'number') { - return val - } - return v[val] } - const encode: EncodeFunction = function enumEncode (val) { + const encode: EncodeFunction = function enumEncode (val, writer) { const enumValue = findValue(val) - const buf = allocUnsafe(unsigned.encodingLength(enumValue)) - unsigned.encode(enumValue, buf) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } + 
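+    // enum values are written to the wire as int32 varints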
writer.int32(enumValue) } - const decode: DecodeFunction = function enumDecode (buf, offset) { - const value = unsigned.decode(buf, offset) - const strValue = value.toString() + const decode: DecodeFunction = function enumDecode (reader) { + const val = reader.uint32() - // Use the reverse mapping to look up the enum key for the stored value - // https://www.typescriptlang.org/docs/handbook/enums.html#reverse-mappings - if (v[strValue] == null) { - throw new Error('Invalid enum value') - } - - return { - value: v[strValue], - length: unsigned.encodingLength(value) - } + return findValue(val) } // @ts-expect-error yeah yeah diff --git a/packages/protons-runtime/src/codecs/fixed32.ts b/packages/protons-runtime/src/codecs/fixed32.ts deleted file mode 100644 index dc1f487..0000000 --- a/packages/protons-runtime/src/codecs/fixed32.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -const ENCODING_LENGTH = 4 - -const encode: EncodeFunction = function fixed32Encode (val) { - const buf = alloc(ENCODING_LENGTH) - const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - view.setInt32(0, val, true) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function fixed32Decode (buf, offset) { - return { - value: buf.getInt32(offset, true), - length: ENCODING_LENGTH - } -} - -export const fixed32 = createCodec('fixed32', CODEC_TYPES.BIT32, encode, decode) diff --git a/packages/protons-runtime/src/codecs/fixed64.ts b/packages/protons-runtime/src/codecs/fixed64.ts deleted file mode 100644 index eca0845..0000000 --- a/packages/protons-runtime/src/codecs/fixed64.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -const ENCODING_LENGTH = 8 - -const encode: EncodeFunction = function int64Encode (val) { - const buf = alloc(ENCODING_LENGTH) - const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - view.setBigInt64(0, val, true) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function int64Decode (buf, offset) { - return { - value: buf.getBigInt64(offset, true), - length: ENCODING_LENGTH - } -} - -export const fixed64 = createCodec('fixed64', CODEC_TYPES.BIT64, encode, decode) diff --git a/packages/protons-runtime/src/codecs/float.ts b/packages/protons-runtime/src/codecs/float.ts deleted file mode 100644 index d1b2519..0000000 --- a/packages/protons-runtime/src/codecs/float.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -const ENCODING_LENGTH = 4 - -const encode: EncodeFunction = function floatEncode (val) { - const buf = alloc(ENCODING_LENGTH) - const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - view.setFloat32(0, val, true) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function floatDecode (buf, offset) { - return { - value: buf.getFloat32(offset, true), - length: ENCODING_LENGTH - } -} - -export const float = createCodec('float', CODEC_TYPES.BIT32, encode, decode) diff --git a/packages/protons-runtime/src/codecs/int32.ts 
b/packages/protons-runtime/src/codecs/int32.ts deleted file mode 100644 index 1a1142f..0000000 --- a/packages/protons-runtime/src/codecs/int32.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { signed } from 'uint8-varint' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -function int32EncodingLength (val: number): number { - if (val < 0) { - return 10 // 10 bytes per spec - https://developers.google.com/protocol-buffers/docs/encoding#signed-ints - } - - return signed.encodingLength(val) -} - -const encode: EncodeFunction = function int32Encode (val) { - const buf = signed.encode(val, alloc(int32EncodingLength(val))) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function int32Decode (buf, offset) { - const value = signed.decode(buf, offset) | 0 - - return { - value, - length: int32EncodingLength(value) - } -} - -export const int32 = createCodec('int32', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/int64.ts b/packages/protons-runtime/src/codecs/int64.ts deleted file mode 100644 index 534bd8d..0000000 --- a/packages/protons-runtime/src/codecs/int64.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { signed } from 'uint8-varint/big' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -function int64EncodingLength (val: bigint): number { - if (val < 0n) { - return 10 // 10 bytes per spec - https://developers.google.com/protocol-buffers/docs/encoding#signed-ints - } - - return signed.encodingLength(val) -} - -const encode: EncodeFunction = function int64Encode (val) { - const buf = signed.encode(val, alloc(int64EncodingLength(val))) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function int64Decode (buf, offset) { - const value = signed.decode(buf, offset) | 0n - - return { - value, - length: int64EncodingLength(value) - } -} - -export const int64 = createCodec('int64', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/message.ts b/packages/protons-runtime/src/codecs/message.ts index d3a20d0..f7abf47 100644 --- a/packages/protons-runtime/src/codecs/message.ts +++ b/packages/protons-runtime/src/codecs/message.ts @@ -1,234 +1,11 @@ -import { unsigned } from 'uint8-varint' -import { createCodec, CODEC_TYPES, EncodeResult, EncodeOptions } from '../codec.js' -import type { DecodeFunction, Codec } from '../codec.js' -import type { FieldDef } from '../index.js' +import { createCodec, CODEC_TYPES, EncodeOptions } from '../codec.js' +import type { Codec } from '../codec.js' +import type { Reader, Writer } from '../index.js' export interface Factory { new (obj: A): T } -export function message (encode: (obj: T, opts?: EncodeOptions) => EncodeResult, fieldDefs: Record): Codec { - const decode: DecodeFunction = function messageDecode (buffer, offset) { - const length = unsigned.decode(buffer, offset) - const lengthLength = unsigned.encodingLength(length) - offset += lengthLength - const end = offset + length - const fields: any = {} - - while (offset < end) { - const key = unsigned.decode(buffer, offset) - offset += unsigned.encodingLength(key) - - const wireType = key & 0x7 - const fieldNumber = key >> 3 - const fieldDef = fieldDefs[fieldNumber.toString()] - let fieldLength = 0 - let value - - if (wireType === 
CODEC_TYPES.VARINT) { - if (fieldDef != null) { - // use the codec if it is available as this could be a bigint - const decoded = fieldDef.codec.decode(buffer, offset) - fieldLength = decoded.length - value = decoded.value - } else { - const value = unsigned.decode(buffer, offset) - fieldLength = unsigned.encodingLength(value) - } - } else if (wireType === CODEC_TYPES.BIT64) { - fieldLength = 8 - } else if (wireType === CODEC_TYPES.LENGTH_DELIMITED) { - const valueLength = unsigned.decode(buffer, offset) - fieldLength = valueLength + unsigned.encodingLength(valueLength) - } else if (wireType === CODEC_TYPES.BIT32) { - fieldLength = 4 - } else if (wireType === CODEC_TYPES.START_GROUP) { - throw new Error('Unsupported wire type START_GROUP') - } else if (wireType === CODEC_TYPES.END_GROUP) { - throw new Error('Unsupported wire type END_GROUP') - } - - if (fieldDef != null) { - if (value == null) { - const decoded = fieldDef.codec.decode(buffer, offset) - value = decoded.value - } - - if (fieldDef.repeats === true) { - if (fields[fieldDef.name] == null) { - fields[fieldDef.name] = [] - } - - fields[fieldDef.name].push(value) - } else { - fields[fieldDef.name] = value - } - } - - offset += fieldLength - } - - // make sure repeated fields have an array if not set - for (const fieldDef of Object.values(fieldDefs)) { - if (fieldDef.repeats === true && fields[fieldDef.name] == null) { - fields[fieldDef.name] = [] - } - } - - return { - value: fields, - length: lengthLength + length - } - } - - return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) -} - -/* -export function message (fieldDefs: FieldDef[]): Codec { - // create a id => FieldDef mapping for quick access - const fieldDefLookup: Record = {} - for (const def of fieldDefs) { - fieldDefLookup[def.id] = def - } - - const encode: EncodeFunction> = function messageEncode (val, opts = {}) { - const bufs: Uint8Array[] = [] - - if (opts.lengthDelimited === true) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - function encodeValue (value: any, fieldNumber: number, fieldDef: FieldDef) { - if (value == null) { - if (fieldDef.optional === true) { - return - } - - throw new Error(`Non optional field "${fieldDef.name}" was ${value === null ? 
'null' : 'undefined'}`) - } - - const key = (fieldNumber << 3) | fieldDef.codec.type - const prefix = unsigned.encode(key) - const encoded = fieldDef.codec.encode(value) - - bufs.push(prefix, ...encoded.bufs) - length += encoded.length - length += prefix.byteLength - } - - for (let i = 0; i < fieldDefs.length; i++) { - const fieldDef = fieldDefs[i] - - if (fieldDef.repeats === true) { - if (!Array.isArray(val[fieldDef.name])) { - throw new Error(`Repeating field "${fieldDef.name}" was not an array`) - } - - for (const value of val[fieldDef.name]) { - encodeValue(value, fieldDef.id, fieldDef) - } - } else { - encodeValue(val[fieldDef.name], fieldDef.id, fieldDef) - } - } - - if (opts.lengthDelimited === true) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length - } - } - - return { - bufs, - length - } - } - - const decode: DecodeFunction = function messageDecode (buffer, offset) { - const length = unsigned.decode(buffer, offset) - const lengthLength = unsigned.encodingLength(length) - offset += lengthLength - const end = offset + length - const fields: any = {} - - while (offset < end) { - const key = unsigned.decode(buffer, offset) - offset += unsigned.encodingLength(key) - - const wireType = key & 0x7 - const fieldNumber = key >> 3 - const fieldDef = fieldDefLookup[fieldNumber] - let fieldLength = 0 - let value - - if (wireType === CODEC_TYPES.VARINT) { - if (fieldDef != null) { - // use the codec if it is available as this could be a bigint - const decoded = fieldDef.codec.decode(buffer, offset) - fieldLength = decoded.length - value = decoded.value - } else { - const value = unsigned.decode(buffer, offset) - fieldLength = unsigned.encodingLength(value) - } - } else if (wireType === CODEC_TYPES.BIT64) { - fieldLength = 8 - } else if (wireType === CODEC_TYPES.LENGTH_DELIMITED) { - const valueLength = unsigned.decode(buffer, offset) - fieldLength = valueLength + unsigned.encodingLength(valueLength) - } else if (wireType === CODEC_TYPES.BIT32) { - fieldLength = 4 - } else if (wireType === CODEC_TYPES.START_GROUP) { - throw new Error('Unsupported wire type START_GROUP') - } else if (wireType === CODEC_TYPES.END_GROUP) { - throw new Error('Unsupported wire type END_GROUP') - } - - if (fieldDef != null) { - if (value == null) { - const decoded = fieldDef.codec.decode(buffer, offset) - value = decoded.value - } - - if (fieldDef.repeats === true) { - if (fields[fieldDef.name] == null) { - fields[fieldDef.name] = [] - } - - fields[fieldDef.name].push(value) - } else { - fields[fieldDef.name] = value - } - } - - offset += fieldLength - } - - // make sure repeated fields have an array if not set - for (let i = 0; i < fieldDefs.length; i++) { - const fieldDef = fieldDefs[i] - - if (fieldDef.repeats === true && fields[fieldDef.name] == null) { - fields[fieldDef.name] = [] - } - } - - return { - value: fields, - length: lengthLength + length - } - } - +export function message (encode: (obj: T, writer: Writer, opts?: EncodeOptions) => void, decode: (reader: Reader, length?: number) => T): Codec { return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) } -*/ diff --git a/packages/protons-runtime/src/codecs/sfixed32.ts b/packages/protons-runtime/src/codecs/sfixed32.ts deleted file mode 100644 index 90a5e48..0000000 --- a/packages/protons-runtime/src/codecs/sfixed32.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from 
'../codec.js' -import { alloc } from 'uint8arrays/alloc' - -const ENCODING_LENGTH = 4 - -const encode: EncodeFunction = function sfixed32Encode (val) { - const buf = alloc(ENCODING_LENGTH) - const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - view.setInt32(0, val, true) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function sfixed32Decode (buf, offset) { - return { - value: buf.getInt32(offset, true), - length: ENCODING_LENGTH - } -} - -export const sfixed32 = createCodec('sfixed32', CODEC_TYPES.BIT32, encode, decode) diff --git a/packages/protons-runtime/src/codecs/sfixed64.ts b/packages/protons-runtime/src/codecs/sfixed64.ts deleted file mode 100644 index 7f5418a..0000000 --- a/packages/protons-runtime/src/codecs/sfixed64.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' -import { alloc } from 'uint8arrays/alloc' - -const ENCODING_LENGTH = 8 - -const encode: EncodeFunction = function sfixed64Encode (val) { - const buf = alloc(ENCODING_LENGTH) - const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - view.setBigInt64(0, val, true) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function sfixed64Decode (buf, offset) { - return { - value: buf.getBigInt64(offset, true), - length: ENCODING_LENGTH - } -} - -export const sfixed64 = createCodec('sfixed64', CODEC_TYPES.BIT64, encode, decode) diff --git a/packages/protons-runtime/src/codecs/sint32.ts b/packages/protons-runtime/src/codecs/sint32.ts deleted file mode 100644 index fed6906..0000000 --- a/packages/protons-runtime/src/codecs/sint32.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { zigzag } from 'uint8-varint' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' - -function sint32EncodingLength (val: number) { - return zigzag.encodingLength(val) -} - -const encode: EncodeFunction = function svarintEncode (val) { - const buf = zigzag.encode(val) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function svarintDecode (buf, offset) { - const value = zigzag.decode(buf, offset) - - return { - value, - length: sint32EncodingLength(value) - } -} - -export const sint32 = createCodec('sint32', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/sint64.ts b/packages/protons-runtime/src/codecs/sint64.ts deleted file mode 100644 index 73e143e..0000000 --- a/packages/protons-runtime/src/codecs/sint64.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { zigzag } from 'uint8-varint/big' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' - -const encode: EncodeFunction = function int64Encode (val) { - const buf = zigzag.encode(val) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function int64Decode (buf, offset) { - const value = zigzag.decode(buf, offset) - - return { - value, - length: zigzag.encodingLength(value) - } -} - -export const sint64 = createCodec('sint64', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/string.ts b/packages/protons-runtime/src/codecs/string.ts deleted file mode 100644 index f3ef2bb..0000000 --- a/packages/protons-runtime/src/codecs/string.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { 
unsigned } from 'uint8-varint' -import { createCodec, CODEC_TYPES } from '../codec.js' -import { allocUnsafe } from 'uint8arrays/alloc' -import * as utf8 from '../utils/utf8.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' - -const encode: EncodeFunction = function stringEncode (val) { - const strLen = utf8.length(val) - const lenLen = unsigned.encodingLength(strLen) - const buf = allocUnsafe(lenLen + strLen) - unsigned.encode(strLen, buf) - - utf8.write(val, buf, lenLen) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function stringDecode (buf, offset) { - const strLen = unsigned.decode(buf, offset) - offset += unsigned.encodingLength(strLen) - const b = buf.subarray(offset, offset + strLen) - const value = utf8.read(b, 0, b.byteLength) - - return { - value, - length: unsigned.encodingLength(strLen) + strLen - } -} - -export const string = createCodec('string', CODEC_TYPES.LENGTH_DELIMITED, encode, decode) diff --git a/packages/protons-runtime/src/codecs/uint32.ts b/packages/protons-runtime/src/codecs/uint32.ts deleted file mode 100644 index 566b755..0000000 --- a/packages/protons-runtime/src/codecs/uint32.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { unsigned } from 'uint8-varint' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' - -const encode: EncodeFunction = function uint32Encode (val) { - const buf = unsigned.encode(val) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function uint32Decode (buf, offset) { - const value = unsigned.decode(buf, offset) - - return { - value, - length: unsigned.encodingLength(value) - } -} - -export const uint32 = createCodec('uint32', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/codecs/uint64.ts b/packages/protons-runtime/src/codecs/uint64.ts deleted file mode 100644 index 2e83664..0000000 --- a/packages/protons-runtime/src/codecs/uint64.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { unsigned } from 'uint8-varint/big' -import { createCodec, CODEC_TYPES } from '../codec.js' -import type { DecodeFunction, EncodeFunction } from '../codec.js' - -const encode: EncodeFunction = function uint64Encode (val) { - const buf = unsigned.encode(val) - - return { - bufs: [ - buf - ], - length: buf.byteLength - } -} - -const decode: DecodeFunction = function uint64Decode (buf, offset) { - const value = unsigned.decode(buf, offset) - - return { - value, - length: unsigned.encodingLength(value) - } -} - -export const uint64 = createCodec('uint64', CODEC_TYPES.VARINT, encode, decode) diff --git a/packages/protons-runtime/src/decode.ts b/packages/protons-runtime/src/decode.ts index 81839fa..1298363 100644 --- a/packages/protons-runtime/src/decode.ts +++ b/packages/protons-runtime/src/decode.ts @@ -1,12 +1,25 @@ -import { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' +import type { Uint8ArrayList } from 'uint8arraylist' import type { Codec } from './codec.js' -import { allocUnsafe } from 'uint8arrays/alloc' +import pb from 'protobufjs' + +const Reader = pb.Reader + +// monkey patch the reader to add native bigint support +const methods = [ + 'uint64', 'int64', 'sint64', 'fixed64', 'sfixed64' +] +methods.forEach(method => { + // @ts-expect-error + const original = Reader.prototype[method] + // @ts-expect-error + Reader.prototype[method] = function (): bigint { + return BigInt(original.call(this).toString()) + } +}) 
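+// accepts either a Uint8Array or a Uint8ArrayList; a Uint8ArrayList is flattened to a single Uint8Array view with subarray() before the Reader consumes it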
export function decodeMessage (buf: Uint8Array | Uint8ArrayList, codec: Codec): T { - // wrap root message - const prefix = allocUnsafe(unsigned.encodingLength(buf.byteLength)) - unsigned.encode(buf.byteLength, prefix) + const reader = Reader.create(buf instanceof Uint8Array ? buf : buf.subarray()) - return codec.decode(new Uint8ArrayList(prefix, buf), 0).value + // @ts-expect-error + return codec.decode(reader) } diff --git a/packages/protons-runtime/src/encode.ts b/packages/protons-runtime/src/encode.ts index 78b541c..b9b742a 100644 --- a/packages/protons-runtime/src/encode.ts +++ b/packages/protons-runtime/src/encode.ts @@ -1,10 +1,28 @@ -import { Uint8ArrayList } from 'uint8arraylist' import type { Codec } from './codec.js' +import pb from 'protobufjs' -export function encodeMessage (message: T, codec: Codec): Uint8ArrayList { - const encoded = codec.encode(message, { +const Writer = pb.Writer + +// monkey patch the writer to add native bigint support +const methods = [ + 'uint64', 'int64', 'sint64', 'fixed64', 'sfixed64' +] +methods.forEach(method => { + // @ts-expect-error + const original = Writer.prototype[method] + // @ts-expect-error + Writer.prototype[method] = function (val: bigint): pb.Writer { + return original.call(this, val.toString()) + } +}) + +export function encodeMessage (message: T, codec: Codec): Uint8Array { + const w = Writer.create() + + // @ts-expect-error + codec.encode(message, w, { lengthDelimited: false }) - return Uint8ArrayList.fromUint8Arrays(encoded.bufs, encoded.length) + return w.finish() } diff --git a/packages/protons-runtime/src/index.ts b/packages/protons-runtime/src/index.ts index 6d24911..095bf20 100644 --- a/packages/protons-runtime/src/index.ts +++ b/packages/protons-runtime/src/index.ts @@ -16,21 +16,211 @@ export { encodeMessage } from './encode.js' -export { bool } from './codecs/bool.js' -export { bytes } from './codecs/bytes.js' -export { double } from './codecs/double.js' export { enumeration } from './codecs/enum.js' -export { fixed32 } from './codecs/fixed32.js' -export { fixed64 } from './codecs/fixed64.js' -export { float } from './codecs/float.js' -export { int32 } from './codecs/int32.js' -export { int64 } from './codecs/int64.js' export { message } from './codecs/message.js' -export { sfixed32 } from './codecs/sfixed32.js' -export { sfixed64 } from './codecs/sfixed64.js' -export { sint32 } from './codecs/sint32.js' -export { sint64 } from './codecs/sint64.js' -export { string } from './codecs/string.js' -export { uint32 } from './codecs/uint32.js' -export { uint64 } from './codecs/uint64.js' export type { Codec, EncodeOptions } from './codec.js' + +export interface Writer { + /** + * Current length + */ + len: number + + /** + * Writes an unsigned 32 bit value as a varint + */ + uint32: (value: number) => Writer + + /** + * Writes a signed 32 bit value as a varint` + */ + int32: (value: number) => Writer + + /** + * Writes a 32 bit value as a varint, zig-zag encoded + */ + sint32: (value: number) => Writer + + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64: (value: bigint) => Writer + + /** + * Writes a signed 64 bit value as a varint + */ + int64: (value: bigint) => Writer + + /** + * Writes a signed 64 bit value as a varint, zig-zag encoded + */ + sint64: (value: bigint) => Writer + + /** + * Writes a boolish value as a varint + */ + bool: (value: boolean) => Writer + + /** + * Writes an unsigned 32 bit value as fixed 32 bits + */ + fixed32: (value: number) => Writer + + /** + * Writes a signed 32 bit value as 
fixed 32 bits + */ + sfixed32: (value: number) => Writer + + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64: (value: bigint) => Writer + + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64: (value: bigint) => Writer + + /** + * Writes a float (32 bit) + */ + float: (value: number) => Writer + + /** + * Writes a double (64 bit float) + */ + double: (value: number) => Writer + + /** + * Writes a sequence of bytes + */ + bytes: (value: Uint8Array) => Writer + + /** + * Writes a string + */ + string: (value: string) => Writer + + /** + * Forks this writer's state by pushing it to a stack. + * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state. + */ + fork: () => Writer + + /** + * Resets this instance to the last state. + */ + reset: () => Writer + + /** + * Resets to the last state and appends the fork state's current write length as a varint followed by its operations. + */ + ldelim: () => Writer + + /** + * Finishes the write operation + */ + finish: () => Uint8Array +} + +export interface Reader { + /** + * Read buffer + */ + buf: Uint8Array + + /** + * Read buffer position + */ + pos: number + + /** + * Read buffer length + */ + len: number + + /** + * Reads a varint as an unsigned 32 bit value + */ + uint32: () => number + + /** + * Reads a varint as a signed 32 bit value + */ + int32: () => number + + /** + * Reads a zig-zag encoded varint as a signed 32 bit value + */ + sint32: () => number + + /** + * Reads a varint as a boolean + */ + bool: () => boolean + + /** + * Reads fixed 32 bits as an unsigned 32 bit integer + */ + fixed32: () => number + + /** + * Reads fixed 32 bits as a signed 32 bit integer + */ + sfixed32: () => number + + /** + * Reads a float (32 bit) as a number + */ + float: () => number + + /** + * Reads a double (64 bit float) as a number + */ + double: () => number + + /** + * Reads a sequence of bytes preceded by its length as a varint + */ + bytes: () => Uint8Array + + /** + * Reads a string preceded by its byte length as a varint + */ + string: () => string + + /** + * Skips the specified number of bytes if specified, otherwise skips a varint + */ + skip: (length?: number) => void + + /** + * Skips the next element of the specified wire type + */ + skipType: (wireType: number) => void + + /** + * Reads a varint as a signed 64 bit value + */ + int64: () => bigint + + /** + * Reads a varint as an unsigned 64 bit value + */ + uint64: () => bigint + + /** + * Reads a zig-zag encoded varint as a signed 64 bit value + */ + sint64: () => bigint + + /** + * Reads fixed 64 bits + */ + fixed64: () => bigint + + /** + * Reads zig-zag encoded fixed 64 bits + */ + sfixed64: () => bigint +} diff --git a/packages/protons-runtime/src/utils/utf8.ts b/packages/protons-runtime/src/utils/utf8.ts deleted file mode 100644 index 1efa206..0000000 --- a/packages/protons-runtime/src/utils/utf8.ts +++ /dev/null @@ -1,123 +0,0 @@ -/** - * A minimal UTF8 implementation for number arrays. - * - * @memberof util - * @namespace - */ - -/** - * Calculates the UTF8 byte length of a string. 
- * - * @param {string} string - String - * @returns {number} Byte length - */ -export function length (string: string) { - let len = 0 - let c = 0 - for (let i = 0; i < string.length; ++i) { - c = string.charCodeAt(i) - - if (c < 128) { - len += 1 - } else if (c < 2048) { - len += 2 - } else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) { - ++i - len += 4 - } else { - len += 3 - } - } - - return len -} - -/** - * Reads UTF8 bytes as a string. - * - * @param {Uint8Array} buffer - Source buffer - * @param {number} start - Source start - * @param {number} end - Source end - * @returns {string} String read - */ -export function read (buffer: Uint8Array, start: number, end: number) { - const len = end - start - - if (len < 1) { - return '' - } - - let parts: string[] | undefined - const chunk: number[] = [] - let i = 0 // char offset - let t: number // temporary - - while (start < end) { - t = buffer[start++] - - if (t < 128) { - chunk[i++] = t - } else if (t > 191 && t < 224) { - chunk[i++] = (t & 31) << 6 | buffer[start++] & 63 - } else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000 - chunk[i++] = 0xD800 + (t >> 10) - chunk[i++] = 0xDC00 + (t & 1023) - } else { - chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63 - } - - if (i > 8191) { - (parts ?? (parts = [])).push(String.fromCharCode.apply(String, chunk)) - i = 0 - } - } - - if (parts != null) { - if (i > 0) { - parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))) - } - - return parts.join('') - } - - return String.fromCharCode.apply(String, chunk.slice(0, i)) -} - -/** - * Writes a string as UTF8 bytes. - * - * @param {string} string - Source string - * @param {Uint8Array} buffer - Destination buffer - * @param {number} offset - Destination offset - * @returns {number} Bytes written - */ -export function write (string: string, buffer: Uint8Array, offset: number) { - const start = offset - let c1 // character 1 - let c2 // character 2 - - for (let i = 0; i < string.length; ++i) { - c1 = string.charCodeAt(i) - - if (c1 < 128) { - buffer[offset++] = c1 - } else if (c1 < 2048) { - buffer[offset++] = c1 >> 6 | 192 - buffer[offset++] = c1 & 63 | 128 - } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) { - c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF) - ++i - buffer[offset++] = c1 >> 18 | 240 - buffer[offset++] = c1 >> 12 & 63 | 128 - buffer[offset++] = c1 >> 6 & 63 | 128 - buffer[offset++] = c1 & 63 | 128 - } else { - buffer[offset++] = c1 >> 12 | 224 - buffer[offset++] = c1 >> 6 & 63 | 128 - buffer[offset++] = c1 & 63 | 128 - } - } - - return offset - start -} diff --git a/packages/protons/package.json b/packages/protons/package.json index ef5c2b7..1bb60fb 100644 --- a/packages/protons/package.json +++ b/packages/protons/package.json @@ -154,12 +154,13 @@ }, "dependencies": { "meow": "^10.1.2", - "protobufjs": "^6.11.2" + "protobufjs-cli": "^1.0.0" }, "devDependencies": { "aegir": "^37.0.5", "long": "^5.2.0", "pbjs": "^0.0.14", + "protobufjs": "^7.0.0", "protons-runtime": "^2.0.0", "uint8arraylist": "^2.3.2" } diff --git a/packages/protons/src/index.ts b/packages/protons/src/index.ts index 9046380..2b03b25 100644 --- a/packages/protons/src/index.ts +++ b/packages/protons/src/index.ts @@ -1,8 +1,7 @@ -import { main as pbjs } from 'protobufjs/cli/pbjs.js' +import { main as pbjs } from 'protobufjs-cli/pbjs.js' import path from 
'path' import { promisify } from 'util' import fs from 'fs/promises' -import { unsigned } from 'uint8-varint' export enum CODEC_TYPES { VARINT = 0, @@ -19,21 +18,59 @@ function pathWithExtension (input: string, extension: string, outputDir?: string } const types: Record = { + bool: 'boolean', + bytes: 'Uint8Array', double: 'number', + fixed32: 'number', + fixed64: 'bigint', float: 'number', int32: 'number', int64: 'bigint', - uint32: 'number', - uint64: 'bigint', - sint32: 'number', - sint64: 'bigint', - fixed32: 'number', - fixed64: 'bigint', sfixed32: 'number', sfixed64: 'bigint', - bool: 'boolean', + sint32: 'number', + sint64: 'bigint', string: 'string', - bytes: 'Uint8Array' + uint32: 'number', + uint64: 'bigint' +} + +const encoderGenerators: Record string> = { + bool: (val) => `writer.bool(${val})`, + bytes: (val) => `writer.bytes(${val})`, + double: (val) => `writer.double(${val})`, + // enumeration: (val) => `writer.double(${val})`, + fixed32: (val) => `writer.fixed32(${val})`, + fixed64: (val) => `writer.fixed64(${val})`, + float: (val) => `writer.float(${val})`, + int32: (val) => `writer.int32(${val})`, + int64: (val) => `writer.int64(${val})`, + sfixed32: (val) => `writer.sfixed32(${val})`, + sfixed64: (val) => `writer.sfixed64(${val})`, + sint32: (val) => `writer.sint32(${val})`, + sint64: (val) => `writer.sint64(${val})`, + string: (val) => `writer.string(${val})`, + uint32: (val) => `writer.uint32(${val})`, + uint64: (val) => `writer.uint64(${val})` +} + +const decoderGenerators: Record string> = { + bool: () => 'reader.bool()', + bytes: () => 'reader.bytes()', + double: () => 'reader.double()', + // enumeration: () => `writer.double(${val})`, + fixed32: () => 'reader.fixed32()', + fixed64: () => 'reader.fixed64()', + float: () => 'reader.float()', + int32: () => 'reader.int32()', + int64: () => 'reader.int64()', + sfixed32: () => 'reader.sfixed32()', + sfixed64: () => 'reader.sfixed64()', + sint32: () => 'reader.sint32()', + sint64: () => 'reader.sint64()', + string: () => 'reader.string()', + uint32: () => 'reader.uint32()', + uint64: () => 'reader.uint64()' } function findTypeName (typeName: string, classDef: MessageDef, moduleDef: ModuleDef): string { @@ -144,14 +181,13 @@ interface FieldDef { id: number options?: Record rule: string + optional: boolean + repeated: boolean } function defineFields (fields: Record, messageDef: MessageDef, moduleDef: ModuleDef) { return Object.entries(fields).map(([fieldName, fieldDef]) => { - const isArray = fieldDef.rule === 'repeated' - const isOptional = !isArray && fieldDef.options?.proto3_optional === true - - return `${fieldName}${isOptional ? '?' : ''}: ${findTypeName(fieldDef.type, messageDef, moduleDef)}${isArray ? '[]' : ''}` + return `${fieldName}${fieldDef.optional ? '?' : ''}: ${findTypeName(fieldDef.type, messageDef, moduleDef)}${fieldDef.repeated ? '[]' : ''}` }) } @@ -219,110 +255,132 @@ export interface ${messageDef.name} { .trim() } }` + + const ensureArrayProps = Object.entries(fields) + .map(([name, fieldDef]) => { + // make sure repeated fields have an array if not set + if (fieldDef.rule === 'repeated') { + return ` obj.${name} = obj.${name} ?? 
[]` + } + + return '' + }).filter(Boolean).join('\n') + + const ensureRequiredFields = Object.entries(fields) + .map(([name, fieldDef]) => { + // make sure required fields are set + if (!fieldDef.optional) { + return ` + if (obj.${name} == null) { + throw new Error('Protocol error: value for required field "${name}" was not found in protobuf') + }` + } + + return '' + }).filter(Boolean).join('\n') + interfaceCodecDef = ` let _codec: Codec<${messageDef.name}> export const codec = (): Codec<${messageDef.name}> => { if (_codec == null) { - _codec = message<${messageDef.name}>((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message<${messageDef.name}>((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - - let length = 0 - ${Object.entries(fields) - .map(([name, fieldDef]) => { - let codec = encoders[fieldDef.type] - let type: string = fieldDef.type - - if (codec == null) { - const def = findDef(fieldDef.type, messageDef, moduleDef) - - if (isEnumDef(def)) { - moduleDef.imports.add('enumeration') - type = 'enum' - } else { - moduleDef.imports.add('message') - type = 'message' - } - - const typeName = findTypeName(fieldDef.type, messageDef, moduleDef) - codec = `${typeName}.codec()` - } else { - moduleDef.imports.add(codec) - } - - if (fieldDef.rule === 'repeated') { - return ` - const $${name} = obj.${name} - if ($${name} != null) { - for (const value of $${name}) { - const prefixField${fieldDef.id} = Uint8Array.from([${unsigned.encode((fieldDef.id << 3) | codecTypes[type]).join(', ')}]) - const encodedField${fieldDef.id} = ${codec}.encode(value) - bufs.push(prefixField${fieldDef.id}, ...encodedField${fieldDef.id}.bufs) - length += prefixField${fieldDef.id}.byteLength + encodedField${fieldDef.id}.length +${Object.entries(fields) + .map(([name, fieldDef]) => { + let codec: string = encoders[fieldDef.type] + let type: string = fieldDef.type + + if (codec == null) { + const def = findDef(fieldDef.type, messageDef, moduleDef) + + if (isEnumDef(def)) { + moduleDef.imports.add('enumeration') + type = 'enum' + } else { + moduleDef.imports.add('message') + type = 'message' } - }` - } - - return ` - const $${name} = obj.${name} - if ($${name} != null) { - const prefixField${fieldDef.id} = Uint8Array.from([${unsigned.encode((fieldDef.id << 3) | codecTypes[type]).join(', ')}]) - const encodedField${fieldDef.id} = ${codec}.encode($${name}) - bufs.push(prefixField${fieldDef.id}, ...encodedField${fieldDef.id}.bufs) - length += prefixField${fieldDef.id}.byteLength + encodedField${fieldDef.id}.length - }` - }).join('\n')} - if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength + const typeName = findTypeName(fieldDef.type, messageDef, moduleDef) + codec = `${typeName}.codec()` + } - return { - bufs, - length - } + return ` + if (obj.${name} != null) {${ + fieldDef.rule === 'repeated' +? ` + for (const value of obj.${name}) { + writer.uint32(${(fieldDef.id << 3) | codecTypes[type]}) + ${encoderGenerators[type] == null ? `${codec}.encode(value, writer)` : encoderGenerators[type]('value')} + }` +: ` + writer.uint32(${(fieldDef.id << 3) | codecTypes[type]}) + ${encoderGenerators[type] == null ? `${codec}.encode(obj.${name}, writer)` : encoderGenerators[type](`obj.${name}`)}` } + }${fieldDef.optional +? 
'' +: ` else { + throw new Error('Protocol error: required field "${name}" was not found in object') + }`}` +}).join('\n')} - return { - bufs, - length + if (opts.lengthDelimited !== false) { + writer.ldelim() } - }, { - ${Object.entries(fields) - .map(([name, fieldDef]) => { - let codec = encoders[fieldDef.type] - - if (codec == null) { - const def = findDef(fieldDef.type, messageDef, moduleDef) - - if (isEnumDef(def)) { - moduleDef.imports.add('enumeration') - } else { - moduleDef.imports.add('message') - } - - const typeName = findTypeName(fieldDef.type, messageDef, moduleDef) - codec = `${typeName}.codec()` - } else { - moduleDef.imports.add(codec) - } - - return `'${fieldDef.id}': { name: '${name}', codec: ${codec}${fieldDef.options?.proto3_optional === true ? ', optional: true' : ''}${fieldDef.rule === 'repeated' ? ', repeats: true' : ''} }` - }).join(',\n ')} + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + ${Object.entries(fields) + .map(([name, fieldDef]) => { + let codec: string = encoders[fieldDef.type] + let type: string = fieldDef.type + + if (codec == null) { + const def = findDef(fieldDef.type, messageDef, moduleDef) + + if (isEnumDef(def)) { + moduleDef.imports.add('enumeration') + type = 'enum' + } else { + moduleDef.imports.add('message') + type = 'message' + } + + const typeName = findTypeName(fieldDef.type, messageDef, moduleDef) + codec = `${typeName}.codec()` + } + + return `case ${fieldDef.id}:${fieldDef.rule === 'repeated' +? ` + obj.${name} = obj.${name} ?? [] + obj.${name}.push(${decoderGenerators[type] == null ? `${codec}.decode(reader${type === 'message' ? ', reader.uint32()' : ''})` : decoderGenerators[type]()})` +: ` + obj.${name} = ${decoderGenerators[type] == null ? `${codec}.decode(reader${type === 'message' ? ', reader.uint32()' : ''})` : decoderGenerators[type]()}`} + break` + }).join('\n ')} + default: + reader.skipType(tag & 7) + break + } + }${ensureArrayProps !== '' ? `\n\n${ensureArrayProps}` : ''}${ensureRequiredFields !== '' ? 
`\n${ensureRequiredFields}` : ''} + + return obj }) } return _codec } - export const encode = (obj: ${messageDef.name}): Uint8ArrayList => { + export const encode = (obj: ${messageDef.name}): Uint8Array => { return encodeMessage(obj, ${messageDef.name}.codec()) } @@ -375,6 +433,13 @@ function defineModule (def: ClassDef): ModuleDef { defineMessage(classDef.nested, classDef) } + if (classDef.fields != null) { + for (const name of Object.keys(classDef.fields)) { + classDef.fields[name].repeated = classDef.fields[name].rule === 'repeated' + classDef.fields[name].optional = !classDef.fields[name].repeated && classDef.fields[name].options?.proto3_optional === true + } + } + if (parent == null) { moduleDef.globals[className] = classDef } @@ -419,7 +484,6 @@ export async function generate (source: string, flags: Flags) { if (moduleDef.imports.has('encodeMessage')) { lines.push("import type { Uint8ArrayList } from 'uint8arraylist'") - lines.push("import { unsigned } from 'uint8-varint'") } if (moduleDef.importedTypes.size > 0) { diff --git a/packages/protons/test/fixtures/basic.ts b/packages/protons/test/fixtures/basic.ts index 60d4d56..0950495 100644 --- a/packages/protons/test/fixtures/basic.ts +++ b/packages/protons/test/fixtures/basic.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { encodeMessage, decodeMessage, message, string, int32 } from 'protons-runtime' +import { encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export interface Basic { @@ -16,58 +15,59 @@ export namespace Basic { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $foo = obj.foo - if ($foo != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode($foo) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.foo != null) { + writer.uint32(10) + writer.string(obj.foo) } - const $num = obj.num - if ($num != null) { - const prefixField2 = Uint8Array.from([16]) - const encodedField2 = int32.encode($num) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.num != null) { + writer.uint32(16) + writer.int32(obj.num) + } else { + throw new Error('Protocol error: required field "num" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} - bufs[0] = prefix - length += prefix.byteLength + const end = length == null ? 
reader.len : reader.pos + length - return { - bufs, - length + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.foo = reader.string() + break + case 2: + obj.num = reader.int32() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.num == null) { + throw new Error('Protocol error: value for required field "num" was not found in protobuf') } - }, { - '1': { name: 'foo', codec: string, optional: true }, - '2': { name: 'num', codec: int32 } + + return obj }) } return _codec } - export const encode = (obj: Basic): Uint8ArrayList => { + export const encode = (obj: Basic): Uint8Array => { return encodeMessage(obj, Basic.codec()) } diff --git a/packages/protons/test/fixtures/circuit.ts b/packages/protons/test/fixtures/circuit.ts index 37a52da..44b0127 100644 --- a/packages/protons/test/fixtures/circuit.ts +++ b/packages/protons/test/fixtures/circuit.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { enumeration, encodeMessage, decodeMessage, message, bytes } from 'protons-runtime' +import { enumeration, encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export interface CircuitRelay { @@ -88,60 +87,70 @@ export namespace CircuitRelay { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $id = obj.id - if ($id != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($id) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.id != null) { + writer.uint32(10) + writer.bytes(obj.id) + } else { + throw new Error('Protocol error: required field "id" was not found in object') } - const $addrs = obj.addrs - if ($addrs != null) { - for (const value of $addrs) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.addrs != null) { + for (const value of obj.addrs) { + writer.uint32(18) + writer.bytes(value) } + } else { + throw new Error('Protocol error: required field "addrs" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes() + break + case 2: + obj.addrs = obj.addrs ?? [] + obj.addrs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.addrs = obj.addrs ?? 
[] - return { - bufs, - length - } + if (obj.id == null) { + throw new Error('Protocol error: value for required field "id" was not found in protobuf') } - return { - bufs, - length + if (obj.addrs == null) { + throw new Error('Protocol error: value for required field "addrs" was not found in protobuf') } - }, { - '1': { name: 'id', codec: bytes }, - '2': { name: 'addrs', codec: bytes, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: Peer): Uint8ArrayList => { + export const encode = (obj: Peer): Uint8Array => { return encodeMessage(obj, Peer.codec()) } @@ -154,76 +163,69 @@ export namespace CircuitRelay { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = CircuitRelay.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + CircuitRelay.Type.codec().encode(obj.type, writer) } - const $srcPeer = obj.srcPeer - if ($srcPeer != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = CircuitRelay.Peer.codec().encode($srcPeer) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.srcPeer != null) { + writer.uint32(18) + CircuitRelay.Peer.codec().encode(obj.srcPeer, writer) } - const $dstPeer = obj.dstPeer - if ($dstPeer != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = CircuitRelay.Peer.codec().encode($dstPeer) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.dstPeer != null) { + writer.uint32(26) + CircuitRelay.Peer.codec().encode(obj.dstPeer, writer) } - const $code = obj.code - if ($code != null) { - const prefixField4 = Uint8Array.from([32]) - const encodedField4 = CircuitRelay.Status.codec().encode($code) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.code != null) { + writer.uint32(32) + CircuitRelay.Status.codec().encode(obj.code, writer) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = CircuitRelay.Type.codec().decode(reader) + break + case 2: + obj.srcPeer = CircuitRelay.Peer.codec().decode(reader, reader.uint32()) + break + case 3: + obj.dstPeer = CircuitRelay.Peer.codec().decode(reader, reader.uint32()) + break + case 4: + obj.code = CircuitRelay.Status.codec().decode(reader) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length - } - }, { - '1': { name: 'type', codec: CircuitRelay.Type.codec(), optional: true }, - '2': { name: 'srcPeer', codec: CircuitRelay.Peer.codec(), optional: true }, - '3': { name: 'dstPeer', codec: CircuitRelay.Peer.codec(), optional: true }, - '4': { name: 'code', codec: CircuitRelay.Status.codec(), optional: true } + return obj }) } return _codec } - export const encode = (obj: CircuitRelay): Uint8ArrayList => { + export const encode = (obj: CircuitRelay): Uint8Array => { return encodeMessage(obj, CircuitRelay.codec()) } diff --git a/packages/protons/test/fixtures/daemon.ts b/packages/protons/test/fixtures/daemon.ts index 69aac11..e2f0627 100644 --- a/packages/protons/test/fixtures/daemon.ts +++ b/packages/protons/test/fixtures/daemon.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { enumeration, encodeMessage, decodeMessage, message, bytes, int64, string, int32 } from 'protons-runtime' +import { enumeration, encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export interface Request { @@ -55,121 +54,115 @@ export namespace Request { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = Request.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + Request.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') } - const $connect = obj.connect - if ($connect != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = ConnectRequest.codec().encode($connect) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.connect != null) { + writer.uint32(18) + ConnectRequest.codec().encode(obj.connect, writer) } - const $streamOpen = obj.streamOpen - if ($streamOpen != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = StreamOpenRequest.codec().encode($streamOpen) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.streamOpen != null) { + writer.uint32(26) + StreamOpenRequest.codec().encode(obj.streamOpen, writer) } - const $streamHandler = obj.streamHandler - if ($streamHandler != null) { - const prefixField4 = Uint8Array.from([34]) - const encodedField4 = StreamHandlerRequest.codec().encode($streamHandler) - 
bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.streamHandler != null) { + writer.uint32(34) + StreamHandlerRequest.codec().encode(obj.streamHandler, writer) } - const $dht = obj.dht - if ($dht != null) { - const prefixField5 = Uint8Array.from([42]) - const encodedField5 = DHTRequest.codec().encode($dht) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length + if (obj.dht != null) { + writer.uint32(42) + DHTRequest.codec().encode(obj.dht, writer) } - const $connManager = obj.connManager - if ($connManager != null) { - const prefixField6 = Uint8Array.from([50]) - const encodedField6 = ConnManagerRequest.codec().encode($connManager) - bufs.push(prefixField6, ...encodedField6.bufs) - length += prefixField6.byteLength + encodedField6.length + if (obj.connManager != null) { + writer.uint32(50) + ConnManagerRequest.codec().encode(obj.connManager, writer) } - const $disconnect = obj.disconnect - if ($disconnect != null) { - const prefixField7 = Uint8Array.from([58]) - const encodedField7 = DisconnectRequest.codec().encode($disconnect) - bufs.push(prefixField7, ...encodedField7.bufs) - length += prefixField7.byteLength + encodedField7.length + if (obj.disconnect != null) { + writer.uint32(58) + DisconnectRequest.codec().encode(obj.disconnect, writer) } - const $pubsub = obj.pubsub - if ($pubsub != null) { - const prefixField8 = Uint8Array.from([66]) - const encodedField8 = PSRequest.codec().encode($pubsub) - bufs.push(prefixField8, ...encodedField8.bufs) - length += prefixField8.byteLength + encodedField8.length + if (obj.pubsub != null) { + writer.uint32(66) + PSRequest.codec().encode(obj.pubsub, writer) } - const $peerStore = obj.peerStore - if ($peerStore != null) { - const prefixField9 = Uint8Array.from([74]) - const encodedField9 = PeerstoreRequest.codec().encode($peerStore) - bufs.push(prefixField9, ...encodedField9.bufs) - length += prefixField9.byteLength + encodedField9.length + if (obj.peerStore != null) { + writer.uint32(74) + PeerstoreRequest.codec().encode(obj.peerStore, writer) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = Request.Type.codec().decode(reader) + break + case 2: + obj.connect = ConnectRequest.codec().decode(reader, reader.uint32()) + break + case 3: + obj.streamOpen = StreamOpenRequest.codec().decode(reader, reader.uint32()) + break + case 4: + obj.streamHandler = StreamHandlerRequest.codec().decode(reader, reader.uint32()) + break + case 5: + obj.dht = DHTRequest.codec().decode(reader, reader.uint32()) + break + case 6: + obj.connManager = ConnManagerRequest.codec().decode(reader, reader.uint32()) + break + case 7: + obj.disconnect = DisconnectRequest.codec().decode(reader, reader.uint32()) + break + case 8: + obj.pubsub = PSRequest.codec().decode(reader, reader.uint32()) + break + case 9: + obj.peerStore = PeerstoreRequest.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length - } - }, { - '1': { name: 'type', codec: Request.Type.codec() }, - '2': { name: 'connect', codec: ConnectRequest.codec(), optional: true }, - '3': { name: 'streamOpen', codec: StreamOpenRequest.codec(), optional: true }, - '4': { name: 'streamHandler', codec: StreamHandlerRequest.codec(), optional: true }, - '5': { name: 'dht', codec: DHTRequest.codec(), optional: true }, - '6': { name: 'connManager', codec: ConnManagerRequest.codec(), optional: true }, - '7': { name: 'disconnect', codec: DisconnectRequest.codec(), optional: true }, - '8': { name: 'pubsub', codec: PSRequest.codec(), optional: true }, - '9': { name: 'peerStore', codec: PeerstoreRequest.codec(), optional: true } + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') + } + + return obj }) } return _codec } - export const encode = (obj: Request): Uint8ArrayList => { + export const encode = (obj: Request): Uint8Array => { return encodeMessage(obj, Request.codec()) } @@ -210,114 +203,118 @@ export namespace Response { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = Response.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $error = obj.error - if ($error != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = ErrorResponse.codec().encode($error) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length - } - - const $streamInfo = obj.streamInfo - if ($streamInfo != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = StreamInfo.codec().encode($streamInfo) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length - } - - const $identify = obj.identify - if ($identify != null) { - const prefixField4 = Uint8Array.from([34]) - const encodedField4 = IdentifyResponse.codec().encode($identify) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length - } - - const $dht = obj.dht - if ($dht != null) { - const prefixField5 = 
Uint8Array.from([42]) - const encodedField5 = DHTResponse.codec().encode($dht) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length - } - - const $peers = obj.peers - if ($peers != null) { - for (const value of $peers) { - const prefixField6 = Uint8Array.from([50]) - const encodedField6 = PeerInfo.codec().encode(value) - bufs.push(prefixField6, ...encodedField6.bufs) - length += prefixField6.byteLength + encodedField6.length + writer.fork() + } + + if (obj.type != null) { + writer.uint32(8) + Response.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') + } + + if (obj.error != null) { + writer.uint32(18) + ErrorResponse.codec().encode(obj.error, writer) + } + + if (obj.streamInfo != null) { + writer.uint32(26) + StreamInfo.codec().encode(obj.streamInfo, writer) + } + + if (obj.identify != null) { + writer.uint32(34) + IdentifyResponse.codec().encode(obj.identify, writer) + } + + if (obj.dht != null) { + writer.uint32(42) + DHTResponse.codec().encode(obj.dht, writer) + } + + if (obj.peers != null) { + for (const value of obj.peers) { + writer.uint32(50) + PeerInfo.codec().encode(value, writer) } + } else { + throw new Error('Protocol error: required field "peers" was not found in object') } - const $pubsub = obj.pubsub - if ($pubsub != null) { - const prefixField7 = Uint8Array.from([58]) - const encodedField7 = PSResponse.codec().encode($pubsub) - bufs.push(prefixField7, ...encodedField7.bufs) - length += prefixField7.byteLength + encodedField7.length + if (obj.pubsub != null) { + writer.uint32(58) + PSResponse.codec().encode(obj.pubsub, writer) } - const $peerStore = obj.peerStore - if ($peerStore != null) { - const prefixField8 = Uint8Array.from([66]) - const encodedField8 = PeerstoreResponse.codec().encode($peerStore) - bufs.push(prefixField8, ...encodedField8.bufs) - length += prefixField8.byteLength + encodedField8.length + if (obj.peerStore != null) { + writer.uint32(66) + PeerstoreResponse.codec().encode(obj.peerStore, writer) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = Response.Type.codec().decode(reader) + break + case 2: + obj.error = ErrorResponse.codec().decode(reader, reader.uint32()) + break + case 3: + obj.streamInfo = StreamInfo.codec().decode(reader, reader.uint32()) + break + case 4: + obj.identify = IdentifyResponse.codec().decode(reader, reader.uint32()) + break + case 5: + obj.dht = DHTResponse.codec().decode(reader, reader.uint32()) + break + case 6: + obj.peers = obj.peers ?? [] + obj.peers.push(PeerInfo.codec().decode(reader, reader.uint32())) + break + case 7: + obj.pubsub = PSResponse.codec().decode(reader, reader.uint32()) + break + case 8: + obj.peerStore = PeerstoreResponse.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.peers = obj.peers ?? 
[] - return { - bufs, - length - } + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') } - return { - bufs, - length + if (obj.peers == null) { + throw new Error('Protocol error: value for required field "peers" was not found in protobuf') } - }, { - '1': { name: 'type', codec: Response.Type.codec() }, - '2': { name: 'error', codec: ErrorResponse.codec(), optional: true }, - '3': { name: 'streamInfo', codec: StreamInfo.codec(), optional: true }, - '4': { name: 'identify', codec: IdentifyResponse.codec(), optional: true }, - '5': { name: 'dht', codec: DHTResponse.codec(), optional: true }, - '6': { name: 'peers', codec: PeerInfo.codec(), repeats: true }, - '7': { name: 'pubsub', codec: PSResponse.codec(), optional: true }, - '8': { name: 'peerStore', codec: PeerstoreResponse.codec(), optional: true } + + return obj }) } return _codec } - export const encode = (obj: Response): Uint8ArrayList => { + export const encode = (obj: Response): Uint8Array => { return encodeMessage(obj, Response.codec()) } @@ -336,60 +333,70 @@ export namespace IdentifyResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $id = obj.id - if ($id != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($id) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $addrs = obj.addrs - if ($addrs != null) { - for (const value of $addrs) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + writer.fork() + } + + if (obj.id != null) { + writer.uint32(10) + writer.bytes(obj.id) + } else { + throw new Error('Protocol error: required field "id" was not found in object') + } + + if (obj.addrs != null) { + for (const value of obj.addrs) { + writer.uint32(18) + writer.bytes(value) } + } else { + throw new Error('Protocol error: required field "addrs" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes() + break + case 2: + obj.addrs = obj.addrs ?? [] + obj.addrs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.addrs = obj.addrs ?? 
[] - return { - bufs, - length - } + if (obj.id == null) { + throw new Error('Protocol error: value for required field "id" was not found in protobuf') } - return { - bufs, - length + if (obj.addrs == null) { + throw new Error('Protocol error: value for required field "addrs" was not found in protobuf') } - }, { - '1': { name: 'id', codec: bytes }, - '2': { name: 'addrs', codec: bytes, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: IdentifyResponse): Uint8ArrayList => { + export const encode = (obj: IdentifyResponse): Uint8Array => { return encodeMessage(obj, IdentifyResponse.codec()) } @@ -409,69 +416,78 @@ export namespace ConnectRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $peer = obj.peer - if ($peer != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($peer) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $addrs = obj.addrs - if ($addrs != null) { - for (const value of $addrs) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + writer.fork() + } + + if (obj.peer != null) { + writer.uint32(10) + writer.bytes(obj.peer) + } else { + throw new Error('Protocol error: required field "peer" was not found in object') + } + + if (obj.addrs != null) { + for (const value of obj.addrs) { + writer.uint32(18) + writer.bytes(value) } + } else { + throw new Error('Protocol error: required field "addrs" was not found in object') } - const $timeout = obj.timeout - if ($timeout != null) { - const prefixField3 = Uint8Array.from([24]) - const encodedField3 = int64.encode($timeout) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.timeout != null) { + writer.uint32(24) + writer.int64(obj.timeout) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + case 2: + obj.addrs = obj.addrs ?? [] + obj.addrs.push(reader.bytes()) + break + case 3: + obj.timeout = reader.int64() + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.addrs = obj.addrs ?? 
[] - return { - bufs, - length - } + if (obj.peer == null) { + throw new Error('Protocol error: value for required field "peer" was not found in protobuf') } - return { - bufs, - length + if (obj.addrs == null) { + throw new Error('Protocol error: value for required field "addrs" was not found in protobuf') } - }, { - '1': { name: 'peer', codec: bytes }, - '2': { name: 'addrs', codec: bytes, repeats: true }, - '3': { name: 'timeout', codec: int64, optional: true } + + return obj }) } return _codec } - export const encode = (obj: ConnectRequest): Uint8ArrayList => { + export const encode = (obj: ConnectRequest): Uint8Array => { return encodeMessage(obj, ConnectRequest.codec()) } @@ -491,69 +507,78 @@ export namespace StreamOpenRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $peer = obj.peer - if ($peer != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($peer) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $proto = obj.proto - if ($proto != null) { - for (const value of $proto) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = string.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + writer.fork() + } + + if (obj.peer != null) { + writer.uint32(10) + writer.bytes(obj.peer) + } else { + throw new Error('Protocol error: required field "peer" was not found in object') + } + + if (obj.proto != null) { + for (const value of obj.proto) { + writer.uint32(18) + writer.string(value) } + } else { + throw new Error('Protocol error: required field "proto" was not found in object') } - const $timeout = obj.timeout - if ($timeout != null) { - const prefixField3 = Uint8Array.from([24]) - const encodedField3 = int64.encode($timeout) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.timeout != null) { + writer.uint32(24) + writer.int64(obj.timeout) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + case 2: + obj.proto = obj.proto ?? [] + obj.proto.push(reader.string()) + break + case 3: + obj.timeout = reader.int64() + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.proto = obj.proto ?? 
[] - return { - bufs, - length - } + if (obj.peer == null) { + throw new Error('Protocol error: value for required field "peer" was not found in protobuf') } - return { - bufs, - length + if (obj.proto == null) { + throw new Error('Protocol error: value for required field "proto" was not found in protobuf') } - }, { - '1': { name: 'peer', codec: bytes }, - '2': { name: 'proto', codec: string, repeats: true }, - '3': { name: 'timeout', codec: int64, optional: true } + + return obj }) } return _codec } - export const encode = (obj: StreamOpenRequest): Uint8ArrayList => { + export const encode = (obj: StreamOpenRequest): Uint8Array => { return encodeMessage(obj, StreamOpenRequest.codec()) } @@ -572,60 +597,70 @@ export namespace StreamHandlerRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $addr = obj.addr - if ($addr != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($addr) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $proto = obj.proto - if ($proto != null) { - for (const value of $proto) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = string.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + writer.fork() + } + + if (obj.addr != null) { + writer.uint32(10) + writer.bytes(obj.addr) + } else { + throw new Error('Protocol error: required field "addr" was not found in object') + } + + if (obj.proto != null) { + for (const value of obj.proto) { + writer.uint32(18) + writer.string(value) } + } else { + throw new Error('Protocol error: required field "proto" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.addr = reader.bytes() + break + case 2: + obj.proto = obj.proto ?? [] + obj.proto.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.proto = obj.proto ?? 
[] - return { - bufs, - length - } + if (obj.addr == null) { + throw new Error('Protocol error: value for required field "addr" was not found in protobuf') } - return { - bufs, - length + if (obj.proto == null) { + throw new Error('Protocol error: value for required field "proto" was not found in protobuf') } - }, { - '1': { name: 'addr', codec: bytes }, - '2': { name: 'proto', codec: string, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: StreamHandlerRequest): Uint8ArrayList => { + export const encode = (obj: StreamHandlerRequest): Uint8Array => { return encodeMessage(obj, StreamHandlerRequest.codec()) } @@ -643,49 +678,51 @@ export namespace ErrorResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $msg = obj.msg - if ($msg != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode($msg) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.msg != null) { + writer.uint32(10) + writer.string(obj.msg) + } else { + throw new Error('Protocol error: required field "msg" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length - bufs[0] = prefix - length += prefix.byteLength + while (reader.pos < end) { + const tag = reader.uint32() - return { - bufs, - length + switch (tag >>> 3) { + case 1: + obj.msg = reader.string() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.msg == null) { + throw new Error('Protocol error: value for required field "msg" was not found in protobuf') } - }, { - '1': { name: 'msg', codec: string } + + return obj }) } return _codec } - export const encode = (obj: ErrorResponse): Uint8ArrayList => { + export const encode = (obj: ErrorResponse): Uint8Array => { return encodeMessage(obj, ErrorResponse.codec()) } @@ -705,67 +742,79 @@ export namespace StreamInfo { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $peer = obj.peer - if ($peer != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($peer) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.peer != null) { + writer.uint32(10) + writer.bytes(obj.peer) + } else { + throw new Error('Protocol error: required field "peer" was not found in object') } - const $addr = obj.addr - if ($addr != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($addr) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.addr != null) { + writer.uint32(18) + writer.bytes(obj.addr) + } else { + throw new Error('Protocol error: required field "addr" was not found in object') } - const $proto = obj.proto - if ($proto != null) { - 
const prefixField3 = Uint8Array.from([26]) - const encodedField3 = string.encode($proto) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.proto != null) { + writer.uint32(26) + writer.string(obj.proto) + } else { + throw new Error('Protocol error: required field "proto" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + case 2: + obj.addr = reader.bytes() + break + case 3: + obj.proto = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + if (obj.peer == null) { + throw new Error('Protocol error: value for required field "peer" was not found in protobuf') + } - return { - bufs, - length - } + if (obj.addr == null) { + throw new Error('Protocol error: value for required field "addr" was not found in protobuf') } - return { - bufs, - length + if (obj.proto == null) { + throw new Error('Protocol error: value for required field "proto" was not found in protobuf') } - }, { - '1': { name: 'peer', codec: bytes }, - '2': { name: 'addr', codec: bytes }, - '3': { name: 'proto', codec: string } + + return obj }) } return _codec } - export const encode = (obj: StreamInfo): Uint8ArrayList => { + export const encode = (obj: StreamInfo): Uint8Array => { return encodeMessage(obj, StreamInfo.codec()) } @@ -819,103 +868,99 @@ export namespace DHTRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = DHTRequest.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + DHTRequest.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') } - const $peer = obj.peer - if ($peer != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($peer) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.peer != null) { + writer.uint32(18) + writer.bytes(obj.peer) } - const $cid = obj.cid - if ($cid != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($cid) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.cid != null) { + writer.uint32(26) + writer.bytes(obj.cid) } - const $key = obj.key - if ($key != null) { - const prefixField4 = Uint8Array.from([34]) - const encodedField4 = bytes.encode($key) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.key != null) { + writer.uint32(34) + writer.bytes(obj.key) } - const $value = obj.value - if ($value != null) { - const prefixField5 = Uint8Array.from([42]) - const 
encodedField5 = bytes.encode($value) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length + if (obj.value != null) { + writer.uint32(42) + writer.bytes(obj.value) } - const $count = obj.count - if ($count != null) { - const prefixField6 = Uint8Array.from([48]) - const encodedField6 = int32.encode($count) - bufs.push(prefixField6, ...encodedField6.bufs) - length += prefixField6.byteLength + encodedField6.length + if (obj.count != null) { + writer.uint32(48) + writer.int32(obj.count) } - const $timeout = obj.timeout - if ($timeout != null) { - const prefixField7 = Uint8Array.from([56]) - const encodedField7 = int64.encode($timeout) - bufs.push(prefixField7, ...encodedField7.bufs) - length += prefixField7.byteLength + encodedField7.length + if (obj.timeout != null) { + writer.uint32(56) + writer.int64(obj.timeout) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = DHTRequest.Type.codec().decode(reader) + break + case 2: + obj.peer = reader.bytes() + break + case 3: + obj.cid = reader.bytes() + break + case 4: + obj.key = reader.bytes() + break + case 5: + obj.value = reader.bytes() + break + case 6: + obj.count = reader.int32() + break + case 7: + obj.timeout = reader.int64() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') } - }, { - '1': { name: 'type', codec: DHTRequest.Type.codec() }, - '2': { name: 'peer', codec: bytes, optional: true }, - '3': { name: 'cid', codec: bytes, optional: true }, - '4': { name: 'key', codec: bytes, optional: true }, - '5': { name: 'value', codec: bytes, optional: true }, - '6': { name: 'count', codec: int32, optional: true }, - '7': { name: 'timeout', codec: int64, optional: true } + + return obj }) } return _codec } - export const encode = (obj: DHTRequest): Uint8ArrayList => { + export const encode = (obj: DHTRequest): Uint8Array => { return encodeMessage(obj, DHTRequest.codec()) } @@ -953,67 +998,67 @@ export namespace DHTResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = DHTResponse.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + DHTResponse.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') } - const $peer = obj.peer - if ($peer != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = PeerInfo.codec().encode($peer) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.peer != null) { + writer.uint32(18) + 
PeerInfo.codec().encode(obj.peer, writer) } - const $value = obj.value - if ($value != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($value) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.value != null) { + writer.uint32(26) + writer.bytes(obj.value) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = DHTResponse.Type.codec().decode(reader) + break + case 2: + obj.peer = PeerInfo.codec().decode(reader, reader.uint32()) + break + case 3: + obj.value = reader.bytes() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') } - }, { - '1': { name: 'type', codec: DHTResponse.Type.codec() }, - '2': { name: 'peer', codec: PeerInfo.codec(), optional: true }, - '3': { name: 'value', codec: bytes, optional: true } + + return obj }) } return _codec } - export const encode = (obj: DHTResponse): Uint8ArrayList => { + export const encode = (obj: DHTResponse): Uint8Array => { return encodeMessage(obj, DHTResponse.codec()) } @@ -1032,60 +1077,70 @@ export namespace PeerInfo { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $id = obj.id - if ($id != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($id) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $addrs = obj.addrs - if ($addrs != null) { - for (const value of $addrs) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + writer.fork() + } + + if (obj.id != null) { + writer.uint32(10) + writer.bytes(obj.id) + } else { + throw new Error('Protocol error: required field "id" was not found in object') + } + + if (obj.addrs != null) { + for (const value of obj.addrs) { + writer.uint32(18) + writer.bytes(value) } + } else { + throw new Error('Protocol error: required field "addrs" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes() + break + case 2: + obj.addrs = obj.addrs ?? [] + obj.addrs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.addrs = obj.addrs ?? 
[] - return { - bufs, - length - } + if (obj.id == null) { + throw new Error('Protocol error: value for required field "id" was not found in protobuf') } - return { - bufs, - length + if (obj.addrs == null) { + throw new Error('Protocol error: value for required field "addrs" was not found in protobuf') } - }, { - '1': { name: 'id', codec: bytes }, - '2': { name: 'addrs', codec: bytes, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: PeerInfo): Uint8ArrayList => { + export const encode = (obj: PeerInfo): Uint8Array => { return encodeMessage(obj, PeerInfo.codec()) } @@ -1124,76 +1179,75 @@ export namespace ConnManagerRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = ConnManagerRequest.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + ConnManagerRequest.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') } - const $peer = obj.peer - if ($peer != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($peer) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.peer != null) { + writer.uint32(18) + writer.bytes(obj.peer) } - const $tag = obj.tag - if ($tag != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = string.encode($tag) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.tag != null) { + writer.uint32(26) + writer.string(obj.tag) } - const $weight = obj.weight - if ($weight != null) { - const prefixField4 = Uint8Array.from([32]) - const encodedField4 = int64.encode($weight) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.weight != null) { + writer.uint32(32) + writer.int64(obj.weight) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = ConnManagerRequest.Type.codec().decode(reader) + break + case 2: + obj.peer = reader.bytes() + break + case 3: + obj.tag = reader.string() + break + case 4: + obj.weight = reader.int64() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') } - }, { - '1': { name: 'type', codec: ConnManagerRequest.Type.codec() }, - '2': { name: 'peer', codec: bytes, optional: true }, - '3': { name: 'tag', codec: string, optional: true }, - '4': { name: 'weight', codec: int64, optional: true } + + return obj }) } return _codec } - export const encode = (obj: ConnManagerRequest): Uint8ArrayList => { + export const encode = (obj: ConnManagerRequest): Uint8Array => { return encodeMessage(obj, ConnManagerRequest.codec()) } @@ -1211,49 +1265,51 @@ export namespace DisconnectRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $peer = obj.peer - if ($peer != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($peer) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.peer != null) { + writer.uint32(10) + writer.bytes(obj.peer) + } else { + throw new Error('Protocol error: required field "peer" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} - bufs[0] = prefix - length += prefix.byteLength + const end = length == null ? 
reader.len : reader.pos + length - return { - bufs, - length + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = reader.bytes() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.peer == null) { + throw new Error('Protocol error: value for required field "peer" was not found in protobuf') } - }, { - '1': { name: 'peer', codec: bytes } + + return obj }) } return _codec } - export const encode = (obj: DisconnectRequest): Uint8ArrayList => { + export const encode = (obj: DisconnectRequest): Uint8Array => { return encodeMessage(obj, DisconnectRequest.codec()) } @@ -1293,67 +1349,67 @@ export namespace PSRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = PSRequest.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + PSRequest.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') } - const $topic = obj.topic - if ($topic != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = string.encode($topic) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.topic != null) { + writer.uint32(18) + writer.string(obj.topic) } - const $data = obj.data - if ($data != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($data) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.data != null) { + writer.uint32(26) + writer.bytes(obj.data) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = PSRequest.Type.codec().decode(reader) + break + case 2: + obj.topic = reader.string() + break + case 3: + obj.data = reader.bytes() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') } - }, { - '1': { name: 'type', codec: PSRequest.Type.codec() }, - '2': { name: 'topic', codec: string, optional: true }, - '3': { name: 'data', codec: bytes, optional: true } + + return obj }) } return _codec } - export const encode = (obj: PSRequest): Uint8ArrayList => { + export const encode = (obj: PSRequest): Uint8Array => { return encodeMessage(obj, PSRequest.codec()) } @@ -1376,96 +1432,96 @@ export namespace PSMessage { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $from = obj.from - if ($from != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($from) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $data = obj.data - if ($data != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($data) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length - } - - const $seqno = obj.seqno - if ($seqno != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($seqno) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length - } - - const $topicIDs = obj.topicIDs - if ($topicIDs != null) { - for (const value of $topicIDs) { - const prefixField4 = Uint8Array.from([34]) - const encodedField4 = string.encode(value) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length - } + writer.fork() } - const $signature = obj.signature - if ($signature != null) { - const prefixField5 = Uint8Array.from([42]) - const encodedField5 = bytes.encode($signature) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length + if (obj.from != null) { + writer.uint32(10) + writer.bytes(obj.from) } - const $key = obj.key - if ($key != null) { - const prefixField6 = Uint8Array.from([50]) - const encodedField6 = bytes.encode($key) - bufs.push(prefixField6, ...encodedField6.bufs) - length += prefixField6.byteLength + encodedField6.length + if (obj.data != null) { + writer.uint32(18) + writer.bytes(obj.data) } - if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + if (obj.seqno != null) { + writer.uint32(26) + writer.bytes(obj.seqno) + } + + if (obj.topicIDs != null) { + for (const value of obj.topicIDs) { + writer.uint32(34) + writer.string(value) + } + } else { + throw new Error('Protocol error: required field "topicIDs" was not found in object') + } - bufs[0] = prefix - length += prefix.byteLength + if (obj.signature != null) { + writer.uint32(42) + writer.bytes(obj.signature) + } - return { - bufs, - length + if (obj.key != null) { + writer.uint32(50) + writer.bytes(obj.key) + } 
+ + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.from = reader.bytes() + break + case 2: + obj.data = reader.bytes() + break + case 3: + obj.seqno = reader.bytes() + break + case 4: + obj.topicIDs = obj.topicIDs ?? [] + obj.topicIDs.push(reader.string()) + break + case 5: + obj.signature = reader.bytes() + break + case 6: + obj.key = reader.bytes() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + obj.topicIDs = obj.topicIDs ?? [] + + if (obj.topicIDs == null) { + throw new Error('Protocol error: value for required field "topicIDs" was not found in protobuf') } - }, { - '1': { name: 'from', codec: bytes, optional: true }, - '2': { name: 'data', codec: bytes, optional: true }, - '3': { name: 'seqno', codec: bytes, optional: true }, - '4': { name: 'topicIDs', codec: string, repeats: true }, - '5': { name: 'signature', codec: bytes, optional: true }, - '6': { name: 'key', codec: bytes, optional: true } + + return obj }) } return _codec } - export const encode = (obj: PSMessage): Uint8ArrayList => { + export const encode = (obj: PSMessage): Uint8Array => { return encodeMessage(obj, PSMessage.codec()) } @@ -1484,62 +1540,74 @@ export namespace PSResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $topics = obj.topics - if ($topics != null) { - for (const value of $topics) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode(value) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + writer.fork() + } + + if (obj.topics != null) { + for (const value of obj.topics) { + writer.uint32(10) + writer.string(value) } + } else { + throw new Error('Protocol error: required field "topics" was not found in object') } - const $peerIDs = obj.peerIDs - if ($peerIDs != null) { - for (const value of $peerIDs) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.peerIDs != null) { + for (const value of obj.peerIDs) { + writer.uint32(18) + writer.bytes(value) } + } else { + throw new Error('Protocol error: required field "peerIDs" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.topics = obj.topics ?? [] + obj.topics.push(reader.string()) + break + case 2: + obj.peerIDs = obj.peerIDs ?? [] + obj.peerIDs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.topics = obj.topics ?? [] + obj.peerIDs = obj.peerIDs ?? 
[] - return { - bufs, - length - } + if (obj.topics == null) { + throw new Error('Protocol error: value for required field "topics" was not found in protobuf') } - return { - bufs, - length + if (obj.peerIDs == null) { + throw new Error('Protocol error: value for required field "peerIDs" was not found in protobuf') } - }, { - '1': { name: 'topics', codec: string, repeats: true }, - '2': { name: 'peerIDs', codec: bytes, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: PSResponse): Uint8ArrayList => { + export const encode = (obj: PSResponse): Uint8Array => { return encodeMessage(obj, PSResponse.codec()) } @@ -1575,69 +1643,78 @@ export namespace PeerstoreRequest { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = PeerstoreRequest.Type.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $id = obj.id - if ($id != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($id) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length - } - - const $protos = obj.protos - if ($protos != null) { - for (const value of $protos) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = string.encode(value) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + writer.fork() + } + + if (obj.type != null) { + writer.uint32(8) + PeerstoreRequest.Type.codec().encode(obj.type, writer) + } else { + throw new Error('Protocol error: required field "type" was not found in object') + } + + if (obj.id != null) { + writer.uint32(18) + writer.bytes(obj.id) + } + + if (obj.protos != null) { + for (const value of obj.protos) { + writer.uint32(26) + writer.string(value) } + } else { + throw new Error('Protocol error: required field "protos" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = PeerstoreRequest.Type.codec().decode(reader) + break + case 2: + obj.id = reader.bytes() + break + case 3: + obj.protos = obj.protos ?? [] + obj.protos.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.protos = obj.protos ?? 
[] - return { - bufs, - length - } + if (obj.type == null) { + throw new Error('Protocol error: value for required field "type" was not found in protobuf') } - return { - bufs, - length + if (obj.protos == null) { + throw new Error('Protocol error: value for required field "protos" was not found in protobuf') } - }, { - '1': { name: 'type', codec: PeerstoreRequest.Type.codec() }, - '2': { name: 'id', codec: bytes, optional: true }, - '3': { name: 'protos', codec: string, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: PeerstoreRequest): Uint8ArrayList => { + export const encode = (obj: PeerstoreRequest): Uint8Array => { return encodeMessage(obj, PeerstoreRequest.codec()) } @@ -1656,60 +1733,64 @@ export namespace PeerstoreResponse { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $peer = obj.peer - if ($peer != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = PeerInfo.codec().encode($peer) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length - } - - const $protos = obj.protos - if ($protos != null) { - for (const value of $protos) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = string.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length - } + writer.fork() } - if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + if (obj.peer != null) { + writer.uint32(10) + PeerInfo.codec().encode(obj.peer, writer) + } - bufs[0] = prefix - length += prefix.byteLength + if (obj.protos != null) { + for (const value of obj.protos) { + writer.uint32(18) + writer.string(value) + } + } else { + throw new Error('Protocol error: required field "protos" was not found in object') + } - return { - bufs, - length + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peer = PeerInfo.codec().decode(reader, reader.uint32()) + break + case 2: + obj.protos = obj.protos ?? [] + obj.protos.push(reader.string()) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + obj.protos = obj.protos ?? 
[] + + if (obj.protos == null) { + throw new Error('Protocol error: value for required field "protos" was not found in protobuf') } - }, { - '1': { name: 'peer', codec: PeerInfo.codec(), optional: true }, - '2': { name: 'protos', codec: string, repeats: true } + + return obj }) } return _codec } - export const encode = (obj: PeerstoreResponse): Uint8ArrayList => { + export const encode = (obj: PeerstoreResponse): Uint8Array => { return encodeMessage(obj, PeerstoreResponse.codec()) } diff --git a/packages/protons/test/fixtures/dht.ts b/packages/protons/test/fixtures/dht.ts index 2732830..fc278a9 100644 --- a/packages/protons/test/fixtures/dht.ts +++ b/packages/protons/test/fixtures/dht.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { encodeMessage, decodeMessage, message, bytes, string, enumeration, int32 } from 'protons-runtime' +import { encodeMessage, decodeMessage, message, enumeration } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export interface Record { @@ -19,85 +18,77 @@ export namespace Record { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $key = obj.key - if ($key != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($key) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.key != null) { + writer.uint32(10) + writer.bytes(obj.key) } - const $value = obj.value - if ($value != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.value != null) { + writer.uint32(18) + writer.bytes(obj.value) } - const $author = obj.author - if ($author != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($author) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.author != null) { + writer.uint32(26) + writer.bytes(obj.author) } - const $signature = obj.signature - if ($signature != null) { - const prefixField4 = Uint8Array.from([34]) - const encodedField4 = bytes.encode($signature) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.signature != null) { + writer.uint32(34) + writer.bytes(obj.signature) } - const $timeReceived = obj.timeReceived - if ($timeReceived != null) { - const prefixField5 = Uint8Array.from([42]) - const encodedField5 = string.encode($timeReceived) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length + if (obj.timeReceived != null) { + writer.uint32(42) + writer.string(obj.timeReceived) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.key = reader.bytes() + break + case 2: + obj.value = reader.bytes() + break + case 3: + obj.author = reader.bytes() + break + case 4: + obj.signature = reader.bytes() + break + case 5: + obj.timeReceived = reader.string() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length - } - }, { - '1': { name: 'key', codec: bytes, optional: true }, - '2': { name: 'value', codec: bytes, optional: true }, - '3': { name: 'author', codec: bytes, optional: true }, - '4': { name: 'signature', codec: bytes, optional: true }, - '5': { name: 'timeReceived', codec: string, optional: true } + return obj }) } return _codec } - export const encode = (obj: Record): Uint8ArrayList => { + export const encode = (obj: Record): Uint8Array => { return encodeMessage(obj, Record.codec()) } @@ -171,69 +162,72 @@ export namespace Message { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $id = obj.id - if ($id != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($id) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.id != null) { + writer.uint32(10) + writer.bytes(obj.id) } - const $addrs = obj.addrs - if ($addrs != null) { - for (const value of $addrs) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.addrs != null) { + for (const value of obj.addrs) { + writer.uint32(18) + writer.bytes(value) } + } else { + throw new Error('Protocol error: required field "addrs" was not found in object') } - const $connection = obj.connection - if ($connection != null) { - const prefixField3 = Uint8Array.from([24]) - const encodedField3 = Message.ConnectionType.codec().encode($connection) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.connection != null) { + writer.uint32(24) + Message.ConnectionType.codec().encode(obj.connection, writer) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes() + break + case 2: + obj.addrs = obj.addrs ?? [] + obj.addrs.push(reader.bytes()) + break + case 3: + obj.connection = Message.ConnectionType.codec().decode(reader) + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + obj.addrs = obj.addrs ?? 
[] + + if (obj.addrs == null) { + throw new Error('Protocol error: value for required field "addrs" was not found in protobuf') } - }, { - '1': { name: 'id', codec: bytes, optional: true }, - '2': { name: 'addrs', codec: bytes, repeats: true }, - '3': { name: 'connection', codec: Message.ConnectionType.codec(), optional: true } + + return obj }) } return _codec } - export const encode = (obj: Peer): Uint8ArrayList => { + export const encode = (obj: Peer): Uint8Array => { return encodeMessage(obj, Peer.codec()) } @@ -246,98 +240,106 @@ export namespace Message { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $type = obj.type - if ($type != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = Message.MessageType.codec().encode($type) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.type != null) { + writer.uint32(8) + Message.MessageType.codec().encode(obj.type, writer) } - const $clusterLevelRaw = obj.clusterLevelRaw - if ($clusterLevelRaw != null) { - const prefixField10 = Uint8Array.from([80]) - const encodedField10 = int32.encode($clusterLevelRaw) - bufs.push(prefixField10, ...encodedField10.bufs) - length += prefixField10.byteLength + encodedField10.length + if (obj.clusterLevelRaw != null) { + writer.uint32(80) + writer.int32(obj.clusterLevelRaw) } - const $key = obj.key - if ($key != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($key) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.key != null) { + writer.uint32(18) + writer.bytes(obj.key) } - const $record = obj.record - if ($record != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($record) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.record != null) { + writer.uint32(26) + writer.bytes(obj.record) } - const $closerPeers = obj.closerPeers - if ($closerPeers != null) { - for (const value of $closerPeers) { - const prefixField8 = Uint8Array.from([66]) - const encodedField8 = Message.Peer.codec().encode(value) - bufs.push(prefixField8, ...encodedField8.bufs) - length += prefixField8.byteLength + encodedField8.length + if (obj.closerPeers != null) { + for (const value of obj.closerPeers) { + writer.uint32(66) + Message.Peer.codec().encode(value, writer) } + } else { + throw new Error('Protocol error: required field "closerPeers" was not found in object') } - const $providerPeers = obj.providerPeers - if ($providerPeers != null) { - for (const value of $providerPeers) { - const prefixField9 = Uint8Array.from([74]) - const encodedField9 = Message.Peer.codec().encode(value) - bufs.push(prefixField9, ...encodedField9.bufs) - length += prefixField9.byteLength + encodedField9.length + if (obj.providerPeers != null) { + for (const value of obj.providerPeers) { + writer.uint32(74) + Message.Peer.codec().encode(value, writer) } + } else { + throw new Error('Protocol error: required field "providerPeers" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + 
const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.type = Message.MessageType.codec().decode(reader) + break + case 10: + obj.clusterLevelRaw = reader.int32() + break + case 2: + obj.key = reader.bytes() + break + case 3: + obj.record = reader.bytes() + break + case 8: + obj.closerPeers = obj.closerPeers ?? [] + obj.closerPeers.push(Message.Peer.codec().decode(reader, reader.uint32())) + break + case 9: + obj.providerPeers = obj.providerPeers ?? [] + obj.providerPeers.push(Message.Peer.codec().decode(reader, reader.uint32())) + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.closerPeers = obj.closerPeers ?? [] + obj.providerPeers = obj.providerPeers ?? [] - return { - bufs, - length - } + if (obj.closerPeers == null) { + throw new Error('Protocol error: value for required field "closerPeers" was not found in protobuf') } - return { - bufs, - length + if (obj.providerPeers == null) { + throw new Error('Protocol error: value for required field "providerPeers" was not found in protobuf') } - }, { - '1': { name: 'type', codec: Message.MessageType.codec(), optional: true }, - '10': { name: 'clusterLevelRaw', codec: int32, optional: true }, - '2': { name: 'key', codec: bytes, optional: true }, - '3': { name: 'record', codec: bytes, optional: true }, - '8': { name: 'closerPeers', codec: Message.Peer.codec(), repeats: true }, - '9': { name: 'providerPeers', codec: Message.Peer.codec(), repeats: true } + + return obj }) } return _codec } - export const encode = (obj: Message): Uint8ArrayList => { + export const encode = (obj: Message): Uint8Array => { return encodeMessage(obj, Message.codec()) } diff --git a/packages/protons/test/fixtures/noise.ts b/packages/protons/test/fixtures/noise.ts index 0c3fc0d..0bdada6 100644 --- a/packages/protons/test/fixtures/noise.ts +++ b/packages/protons/test/fixtures/noise.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { encodeMessage, decodeMessage, message, bytes } from 'protons-runtime' +import { encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export namespace pb { @@ -18,67 +17,79 @@ export namespace pb { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $identityKey = obj.identityKey - if ($identityKey != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($identityKey) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.identityKey != null) { + writer.uint32(10) + writer.bytes(obj.identityKey) + } else { + throw new Error('Protocol error: required field "identityKey" was not found in object') } - const $identitySig = obj.identitySig - if ($identitySig != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($identitySig) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if 
(obj.identitySig != null) { + writer.uint32(18) + writer.bytes(obj.identitySig) + } else { + throw new Error('Protocol error: required field "identitySig" was not found in object') } - const $data = obj.data - if ($data != null) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = bytes.encode($data) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.data != null) { + writer.uint32(26) + writer.bytes(obj.data) + } else { + throw new Error('Protocol error: required field "data" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.identityKey = reader.bytes() + break + case 2: + obj.identitySig = reader.bytes() + break + case 3: + obj.data = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + if (obj.identityKey == null) { + throw new Error('Protocol error: value for required field "identityKey" was not found in protobuf') + } - return { - bufs, - length - } + if (obj.identitySig == null) { + throw new Error('Protocol error: value for required field "identitySig" was not found in protobuf') } - return { - bufs, - length + if (obj.data == null) { + throw new Error('Protocol error: value for required field "data" was not found in protobuf') } - }, { - '1': { name: 'identityKey', codec: bytes }, - '2': { name: 'identitySig', codec: bytes }, - '3': { name: 'data', codec: bytes } + + return obj }) } return _codec } - export const encode = (obj: NoiseHandshakePayload): Uint8ArrayList => { + export const encode = (obj: NoiseHandshakePayload): Uint8Array => { return encodeMessage(obj, NoiseHandshakePayload.codec()) } diff --git a/packages/protons/test/fixtures/peer.ts b/packages/protons/test/fixtures/peer.ts index a80a6c6..68bd477 100644 --- a/packages/protons/test/fixtures/peer.ts +++ b/packages/protons/test/fixtures/peer.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { encodeMessage, decodeMessage, message, string, bytes, bool } from 'protons-runtime' +import { encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export interface Peer { @@ -19,91 +18,108 @@ export namespace Peer { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) - } - - let length = 0 - - const $addresses = obj.addresses - if ($addresses != null) { - for (const value of $addresses) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = Address.codec().encode(value) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + writer.fork() + } + + if (obj.addresses != null) { + for (const value of obj.addresses) { + writer.uint32(10) + Address.codec().encode(value, writer) } + } else { + throw new Error('Protocol error: required field "addresses" was not found in object') } - const 
$protocols = obj.protocols - if ($protocols != null) { - for (const value of $protocols) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = string.encode(value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.protocols != null) { + for (const value of obj.protocols) { + writer.uint32(18) + writer.string(value) } + } else { + throw new Error('Protocol error: required field "protocols" was not found in object') } - const $metadata = obj.metadata - if ($metadata != null) { - for (const value of $metadata) { - const prefixField3 = Uint8Array.from([26]) - const encodedField3 = Metadata.codec().encode(value) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.metadata != null) { + for (const value of obj.metadata) { + writer.uint32(26) + Metadata.codec().encode(value, writer) } + } else { + throw new Error('Protocol error: required field "metadata" was not found in object') } - const $pubKey = obj.pubKey - if ($pubKey != null) { - const prefixField4 = Uint8Array.from([34]) - const encodedField4 = bytes.encode($pubKey) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.pubKey != null) { + writer.uint32(34) + writer.bytes(obj.pubKey) } - const $peerRecordEnvelope = obj.peerRecordEnvelope - if ($peerRecordEnvelope != null) { - const prefixField5 = Uint8Array.from([42]) - const encodedField5 = bytes.encode($peerRecordEnvelope) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length + if (obj.peerRecordEnvelope != null) { + writer.uint32(42) + writer.bytes(obj.peerRecordEnvelope) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.addresses = obj.addresses ?? [] + obj.addresses.push(Address.codec().decode(reader, reader.uint32())) + break + case 2: + obj.protocols = obj.protocols ?? [] + obj.protocols.push(reader.string()) + break + case 3: + obj.metadata = obj.metadata ?? [] + obj.metadata.push(Metadata.codec().decode(reader, reader.uint32())) + break + case 4: + obj.pubKey = reader.bytes() + break + case 5: + obj.peerRecordEnvelope = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } - bufs[0] = prefix - length += prefix.byteLength + obj.addresses = obj.addresses ?? [] + obj.protocols = obj.protocols ?? [] + obj.metadata = obj.metadata ?? 
[] - return { - bufs, - length - } + if (obj.addresses == null) { + throw new Error('Protocol error: value for required field "addresses" was not found in protobuf') + } + + if (obj.protocols == null) { + throw new Error('Protocol error: value for required field "protocols" was not found in protobuf') } - return { - bufs, - length + if (obj.metadata == null) { + throw new Error('Protocol error: value for required field "metadata" was not found in protobuf') } - }, { - '1': { name: 'addresses', codec: Address.codec(), repeats: true }, - '2': { name: 'protocols', codec: string, repeats: true }, - '3': { name: 'metadata', codec: Metadata.codec(), repeats: true }, - '4': { name: 'pubKey', codec: bytes, optional: true }, - '5': { name: 'peerRecordEnvelope', codec: bytes, optional: true } + + return obj }) } return _codec } - export const encode = (obj: Peer): Uint8ArrayList => { + export const encode = (obj: Peer): Uint8Array => { return encodeMessage(obj, Peer.codec()) } @@ -122,58 +138,59 @@ export namespace Address { export const codec = (): Codec
=> { if (_codec == null) { - _codec = message
((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message
((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $multiaddr = obj.multiaddr - if ($multiaddr != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = bytes.encode($multiaddr) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.multiaddr != null) { + writer.uint32(10) + writer.bytes(obj.multiaddr) + } else { + throw new Error('Protocol error: required field "multiaddr" was not found in object') } - const $isCertified = obj.isCertified - if ($isCertified != null) { - const prefixField2 = Uint8Array.from([16]) - const encodedField2 = bool.encode($isCertified) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.isCertified != null) { + writer.uint32(16) + writer.bool(obj.isCertified) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.multiaddr = reader.bytes() + break + case 2: + obj.isCertified = reader.bool() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.multiaddr == null) { + throw new Error('Protocol error: value for required field "multiaddr" was not found in protobuf') } - }, { - '1': { name: 'multiaddr', codec: bytes }, - '2': { name: 'isCertified', codec: bool, optional: true } + + return obj }) } return _codec } - export const encode = (obj: Address): Uint8ArrayList => { + export const encode = (obj: Address): Uint8Array => { return encodeMessage(obj, Address.codec()) } @@ -192,58 +209,65 @@ export namespace Metadata { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $key = obj.key - if ($key != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode($key) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.key != null) { + writer.uint32(10) + writer.string(obj.key) + } else { + throw new Error('Protocol error: required field "key" was not found in object') } - const $value = obj.value - if ($value != null) { - const prefixField2 = Uint8Array.from([18]) - const encodedField2 = bytes.encode($value) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.value != null) { + writer.uint32(18) + writer.bytes(obj.value) + } else { + throw new Error('Protocol error: required field "value" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.key = reader.string() + break + case 2: + obj.value = reader.bytes() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.key == null) { + throw new Error('Protocol error: value for required field "key" was not found in protobuf') } - }, { - '1': { name: 'key', codec: string }, - '2': { name: 'value', codec: bytes } + + if (obj.value == null) { + throw new Error('Protocol error: value for required field "value" was not found in protobuf') + } + + return obj }) } return _codec } - export const encode = (obj: Metadata): Uint8ArrayList => { + export const encode = (obj: Metadata): Uint8Array => { return encodeMessage(obj, Metadata.codec()) } diff --git a/packages/protons/test/fixtures/test.ts b/packages/protons/test/fixtures/test.ts index b90dabb..eea1650 100644 --- a/packages/protons/test/fixtures/test.ts +++ b/packages/protons/test/fixtures/test.ts @@ -1,9 +1,8 @@ /* eslint-disable import/export */ /* eslint-disable @typescript-eslint/no-namespace */ -import { enumeration, encodeMessage, decodeMessage, message, string, bool, int32, int64, uint32, uint64, sint32, sint64, double, float, bytes, fixed32, fixed64, sfixed32, sfixed64 } from 'protons-runtime' +import { enumeration, encodeMessage, decodeMessage, message } from 'protons-runtime' import type { Uint8ArrayList } from 'uint8arraylist' -import { unsigned } from 'uint8-varint' import type { Codec } from 'protons-runtime' export enum AnEnum { @@ -30,49 +29,51 @@ export namespace SubMessage { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $foo = obj.foo - if ($foo != null) { - const prefixField1 = Uint8Array.from([10]) - const encodedField1 = string.encode($foo) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.foo != null) { + writer.uint32(10) + writer.string(obj.foo) + } else { + throw new Error('Protocol error: required field "foo" was not found in object') } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length - bufs[0] = prefix - length += prefix.byteLength + while (reader.pos < end) { + const tag = reader.uint32() - return { - bufs, - length + switch (tag >>> 3) { + case 1: + obj.foo = reader.string() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + if (obj.foo == null) { + throw new Error('Protocol error: value for required field "foo" was not found in protobuf') } - }, { - '1': { name: 'foo', codec: string } + + return obj }) } return _codec } - export const encode = (obj: SubMessage): Uint8ArrayList => { + export const encode = (obj: SubMessage): Uint8Array => { return encodeMessage(obj, SubMessage.codec()) } @@ -107,204 +108,192 @@ export namespace AllTheTypes { export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, opts = {}) => { - const bufs: Uint8Array[] = [] - + _codec = message((obj, writer, opts = {}) => { if (opts.lengthDelimited !== false) { - // will hold length prefix - bufs.push(new Uint8Array(0)) + writer.fork() } - let length = 0 - - const $field1 = obj.field1 - if ($field1 != null) { - const prefixField1 = Uint8Array.from([8]) - const encodedField1 = bool.encode($field1) - bufs.push(prefixField1, ...encodedField1.bufs) - length += prefixField1.byteLength + encodedField1.length + if (obj.field1 != null) { + writer.uint32(8) + writer.bool(obj.field1) } - const $field2 = obj.field2 - if ($field2 != null) { - const prefixField2 = Uint8Array.from([16]) - const encodedField2 = int32.encode($field2) - bufs.push(prefixField2, ...encodedField2.bufs) - length += prefixField2.byteLength + encodedField2.length + if (obj.field2 != null) { + writer.uint32(16) + writer.int32(obj.field2) } - const $field3 = obj.field3 - if ($field3 != null) { - const prefixField3 = Uint8Array.from([24]) - const encodedField3 = int64.encode($field3) - bufs.push(prefixField3, ...encodedField3.bufs) - length += prefixField3.byteLength + encodedField3.length + if (obj.field3 != null) { + writer.uint32(24) + writer.int64(obj.field3) } - const $field4 = obj.field4 - if ($field4 != null) { - const prefixField4 = Uint8Array.from([32]) - const encodedField4 = uint32.encode($field4) - bufs.push(prefixField4, ...encodedField4.bufs) - length += prefixField4.byteLength + encodedField4.length + if (obj.field4 != null) { + writer.uint32(32) + writer.uint32(obj.field4) } - const $field5 = obj.field5 - if ($field5 != null) { - const prefixField5 = Uint8Array.from([40]) - const encodedField5 = uint64.encode($field5) - bufs.push(prefixField5, ...encodedField5.bufs) - length += prefixField5.byteLength + encodedField5.length + if (obj.field5 != null) { + writer.uint32(40) + writer.uint64(obj.field5) } - const $field6 = obj.field6 - if ($field6 != null) { - const prefixField6 = Uint8Array.from([48]) - const encodedField6 = sint32.encode($field6) - bufs.push(prefixField6, ...encodedField6.bufs) - length += prefixField6.byteLength + encodedField6.length + if (obj.field6 != null) { + writer.uint32(48) + writer.sint32(obj.field6) } - const $field7 = obj.field7 - if ($field7 != null) { - const prefixField7 = Uint8Array.from([56]) - const encodedField7 = sint64.encode($field7) - bufs.push(prefixField7, ...encodedField7.bufs) - length += prefixField7.byteLength + encodedField7.length + if (obj.field7 != null) { + writer.uint32(56) + writer.sint64(obj.field7) } - const $field8 = obj.field8 - if ($field8 != null) { - const prefixField8 = Uint8Array.from([65]) - const encodedField8 = double.encode($field8) - bufs.push(prefixField8, 
...encodedField8.bufs) - length += prefixField8.byteLength + encodedField8.length + if (obj.field8 != null) { + writer.uint32(65) + writer.double(obj.field8) } - const $field9 = obj.field9 - if ($field9 != null) { - const prefixField9 = Uint8Array.from([77]) - const encodedField9 = float.encode($field9) - bufs.push(prefixField9, ...encodedField9.bufs) - length += prefixField9.byteLength + encodedField9.length + if (obj.field9 != null) { + writer.uint32(77) + writer.float(obj.field9) } - const $field10 = obj.field10 - if ($field10 != null) { - const prefixField10 = Uint8Array.from([82]) - const encodedField10 = string.encode($field10) - bufs.push(prefixField10, ...encodedField10.bufs) - length += prefixField10.byteLength + encodedField10.length + if (obj.field10 != null) { + writer.uint32(82) + writer.string(obj.field10) } - const $field11 = obj.field11 - if ($field11 != null) { - const prefixField11 = Uint8Array.from([90]) - const encodedField11 = bytes.encode($field11) - bufs.push(prefixField11, ...encodedField11.bufs) - length += prefixField11.byteLength + encodedField11.length + if (obj.field11 != null) { + writer.uint32(90) + writer.bytes(obj.field11) } - const $field12 = obj.field12 - if ($field12 != null) { - const prefixField12 = Uint8Array.from([96]) - const encodedField12 = AnEnum.codec().encode($field12) - bufs.push(prefixField12, ...encodedField12.bufs) - length += prefixField12.byteLength + encodedField12.length + if (obj.field12 != null) { + writer.uint32(96) + AnEnum.codec().encode(obj.field12, writer) } - const $field13 = obj.field13 - if ($field13 != null) { - const prefixField13 = Uint8Array.from([106]) - const encodedField13 = SubMessage.codec().encode($field13) - bufs.push(prefixField13, ...encodedField13.bufs) - length += prefixField13.byteLength + encodedField13.length + if (obj.field13 != null) { + writer.uint32(106) + SubMessage.codec().encode(obj.field13, writer) } - const $field14 = obj.field14 - if ($field14 != null) { - for (const value of $field14) { - const prefixField14 = Uint8Array.from([114]) - const encodedField14 = string.encode(value) - bufs.push(prefixField14, ...encodedField14.bufs) - length += prefixField14.byteLength + encodedField14.length + if (obj.field14 != null) { + for (const value of obj.field14) { + writer.uint32(114) + writer.string(value) } + } else { + throw new Error('Protocol error: required field "field14" was not found in object') } - const $field15 = obj.field15 - if ($field15 != null) { - const prefixField15 = Uint8Array.from([125]) - const encodedField15 = fixed32.encode($field15) - bufs.push(prefixField15, ...encodedField15.bufs) - length += prefixField15.byteLength + encodedField15.length + if (obj.field15 != null) { + writer.uint32(125) + writer.fixed32(obj.field15) } - const $field16 = obj.field16 - if ($field16 != null) { - const prefixField16 = Uint8Array.from([129, 1]) - const encodedField16 = fixed64.encode($field16) - bufs.push(prefixField16, ...encodedField16.bufs) - length += prefixField16.byteLength + encodedField16.length + if (obj.field16 != null) { + writer.uint32(129) + writer.fixed64(obj.field16) } - const $field17 = obj.field17 - if ($field17 != null) { - const prefixField17 = Uint8Array.from([141, 1]) - const encodedField17 = sfixed32.encode($field17) - bufs.push(prefixField17, ...encodedField17.bufs) - length += prefixField17.byteLength + encodedField17.length + if (obj.field17 != null) { + writer.uint32(141) + writer.sfixed32(obj.field17) } - const $field18 = obj.field18 - if ($field18 != null) { - const 
prefixField18 = Uint8Array.from([145, 1]) - const encodedField18 = sfixed64.encode($field18) - bufs.push(prefixField18, ...encodedField18.bufs) - length += prefixField18.byteLength + encodedField18.length + if (obj.field18 != null) { + writer.uint32(145) + writer.sfixed64(obj.field18) } if (opts.lengthDelimited !== false) { - const prefix = unsigned.encode(length) - - bufs[0] = prefix - length += prefix.byteLength - - return { - bufs, - length + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.field1 = reader.bool() + break + case 2: + obj.field2 = reader.int32() + break + case 3: + obj.field3 = reader.int64() + break + case 4: + obj.field4 = reader.uint32() + break + case 5: + obj.field5 = reader.uint64() + break + case 6: + obj.field6 = reader.sint32() + break + case 7: + obj.field7 = reader.sint64() + break + case 8: + obj.field8 = reader.double() + break + case 9: + obj.field9 = reader.float() + break + case 10: + obj.field10 = reader.string() + break + case 11: + obj.field11 = reader.bytes() + break + case 12: + obj.field12 = AnEnum.codec().decode(reader) + break + case 13: + obj.field13 = SubMessage.codec().decode(reader, reader.uint32()) + break + case 14: + obj.field14 = obj.field14 ?? [] + obj.field14.push(reader.string()) + break + case 15: + obj.field15 = reader.fixed32() + break + case 16: + obj.field16 = reader.fixed64() + break + case 17: + obj.field17 = reader.sfixed32() + break + case 18: + obj.field18 = reader.sfixed64() + break + default: + reader.skipType(tag & 7) + break } } - return { - bufs, - length + obj.field14 = obj.field14 ?? [] + + if (obj.field14 == null) { + throw new Error('Protocol error: value for required field "field14" was not found in protobuf') } - }, { - '1': { name: 'field1', codec: bool, optional: true }, - '2': { name: 'field2', codec: int32, optional: true }, - '3': { name: 'field3', codec: int64, optional: true }, - '4': { name: 'field4', codec: uint32, optional: true }, - '5': { name: 'field5', codec: uint64, optional: true }, - '6': { name: 'field6', codec: sint32, optional: true }, - '7': { name: 'field7', codec: sint64, optional: true }, - '8': { name: 'field8', codec: double, optional: true }, - '9': { name: 'field9', codec: float, optional: true }, - '10': { name: 'field10', codec: string, optional: true }, - '11': { name: 'field11', codec: bytes, optional: true }, - '12': { name: 'field12', codec: AnEnum.codec(), optional: true }, - '13': { name: 'field13', codec: SubMessage.codec(), optional: true }, - '14': { name: 'field14', codec: string, repeats: true }, - '15': { name: 'field15', codec: fixed32, optional: true }, - '16': { name: 'field16', codec: fixed64, optional: true }, - '17': { name: 'field17', codec: sfixed32, optional: true }, - '18': { name: 'field18', codec: sfixed64, optional: true } + + return obj }) } return _codec } - export const encode = (obj: AllTheTypes): Uint8ArrayList => { + export const encode = (obj: AllTheTypes): Uint8Array => { return encodeMessage(obj, AllTheTypes.codec()) } diff --git a/packages/protons/test/index.spec.ts b/packages/protons/test/index.spec.ts index d333db5..060c8d6 100644 --- a/packages/protons/test/index.spec.ts +++ b/packages/protons/test/index.spec.ts @@ -10,6 +10,7 @@ import protobufjs, { Type as PBType } from 'protobufjs' import { Peer } from './fixtures/peer.js' import { CircuitRelay } from 
'./fixtures/circuit.js' import long from 'long' +import { alloc } from 'uint8arrays/alloc' function longifyBigInts (obj: any) { const output = { @@ -59,7 +60,7 @@ describe('encode', () => { field8: 1, field9: 1, field10: 'hello', - field11: Uint8Array.from([1, 2, 3]), + field11: alloc(3).map((_, i) => i), field12: AnEnum.DERP, field13: { foo: 'bar' @@ -121,8 +122,8 @@ describe('encode', () => { field8: -2147483647, field9: -2147483648, field14: [], - field15: -2147483647, - field16: -9223372036854775807n, + field15: 0, + field16: 0n, field17: -2147483647, field18: -9223372036854775807n } diff --git a/packages/protons/tsconfig.json b/packages/protons/tsconfig.json index fbb3170..196cb1e 100644 --- a/packages/protons/tsconfig.json +++ b/packages/protons/tsconfig.json @@ -9,7 +9,8 @@ "test" ], "exclude": [ - "test/fixtures/*.pbjs.ts" + "test/fixtures/*.pbjs.ts", + "test/fixtures/*.protobuf.js" ], "references": [ { From db78f656e96def76d4a882ad57606e69bc95e8c8 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 9 Aug 2022 10:01:26 +0100 Subject: [PATCH 10/14] chore: add missing dep --- packages/protons-runtime/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/protons-runtime/package.json b/packages/protons-runtime/package.json index d3fdf63..12308fc 100644 --- a/packages/protons-runtime/package.json +++ b/packages/protons-runtime/package.json @@ -148,6 +148,7 @@ "release": "aegir release" }, "dependencies": { + "protobufjs": "^7.0.0", "uint8arraylist": "^2.3.2" }, "devDependencies": { From 3b2451f7474309281bc2edcc475a2cad6d75d889 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 9 Aug 2022 10:41:01 +0100 Subject: [PATCH 11/14] chore: deps again --- packages/protons/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/protons/package.json b/packages/protons/package.json index 1bb60fb..bdf7f01 100644 --- a/packages/protons/package.json +++ b/packages/protons/package.json @@ -162,6 +162,7 @@ "pbjs": "^0.0.14", "protobufjs": "^7.0.0", "protons-runtime": "^2.0.0", - "uint8arraylist": "^2.3.2" + "uint8arraylist": "^2.3.2", + "uint8arrays": "3.1.0" } } From cfadc93c0bc4b02fb55fc7cd3a4cb97b99fe1a73 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 9 Aug 2022 10:41:48 +0100 Subject: [PATCH 12/14] chore: range --- packages/protons/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/protons/package.json b/packages/protons/package.json index bdf7f01..80ef35a 100644 --- a/packages/protons/package.json +++ b/packages/protons/package.json @@ -163,6 +163,6 @@ "protobufjs": "^7.0.0", "protons-runtime": "^2.0.0", "uint8arraylist": "^2.3.2", - "uint8arrays": "3.1.0" + "uint8arrays": "^3.1.0" } } From 406c5a91c1838a4c5342f3d02f4a000a89e40e8e Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 9 Aug 2022 15:01:02 +0100 Subject: [PATCH 13/14] chore: add benchmark --- .../protons-benchmark/src/protobufjs/rpc.d.ts | 1008 +++++++ .../protons-benchmark/src/protobufjs/rpc.js | 2643 +++++++++++++++++ packages/protons-benchmark/src/protons/rpc.ts | 746 +++++ packages/protons-benchmark/src/rpc.proto | 52 + packages/protons-benchmark/src/rpc.ts | 49 + packages/protons-benchmark/tsconfig.json | 3 +- 6 files changed, 4500 insertions(+), 1 deletion(-) create mode 100644 packages/protons-benchmark/src/protobufjs/rpc.d.ts create mode 100644 packages/protons-benchmark/src/protobufjs/rpc.js create mode 100644 packages/protons-benchmark/src/protons/rpc.ts create mode 100644 packages/protons-benchmark/src/rpc.proto create mode 
100644 packages/protons-benchmark/src/rpc.ts diff --git a/packages/protons-benchmark/src/protobufjs/rpc.d.ts b/packages/protons-benchmark/src/protobufjs/rpc.d.ts new file mode 100644 index 0000000..c83cd3b --- /dev/null +++ b/packages/protons-benchmark/src/protobufjs/rpc.d.ts @@ -0,0 +1,1008 @@ +import * as $protobuf from "protobufjs"; +/** Properties of a RPC. */ +export interface IRPC { + + /** RPC subscriptions */ + subscriptions?: (RPC.ISubOpts[]|null); + + /** RPC messages */ + messages?: (RPC.IMessage[]|null); + + /** RPC control */ + control?: (RPC.IControlMessage|null); +} + +/** Represents a RPC. */ +export class RPC implements IRPC { + + /** + * Constructs a new RPC. + * @param [properties] Properties to set + */ + constructor(properties?: IRPC); + + /** RPC subscriptions. */ + public subscriptions: RPC.ISubOpts[]; + + /** RPC messages. */ + public messages: RPC.IMessage[]; + + /** RPC control. */ + public control?: (RPC.IControlMessage|null); + + /** RPC _control. */ + public _control?: "control"; + + /** + * Creates a new RPC instance using the specified properties. + * @param [properties] Properties to set + * @returns RPC instance + */ + public static create(properties?: IRPC): RPC; + + /** + * Encodes the specified RPC message. Does not implicitly {@link RPC.verify|verify} messages. + * @param message RPC message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: IRPC, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified RPC message, length delimited. Does not implicitly {@link RPC.verify|verify} messages. + * @param message RPC message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: IRPC, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a RPC message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns RPC + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC; + + /** + * Decodes a RPC message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns RPC + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC; + + /** + * Verifies a RPC message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a RPC message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns RPC + */ + public static fromObject(object: { [k: string]: any }): RPC; + + /** + * Creates a plain object from a RPC message. Also converts values to other types if specified. + * @param message RPC + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this RPC to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for RPC + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; +} + +export namespace RPC { + + /** Properties of a SubOpts. */ + interface ISubOpts { + + /** SubOpts subscribe */ + subscribe?: (boolean|null); + + /** SubOpts topic */ + topic?: (string|null); + } + + /** Represents a SubOpts. */ + class SubOpts implements ISubOpts { + + /** + * Constructs a new SubOpts. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.ISubOpts); + + /** SubOpts subscribe. */ + public subscribe?: (boolean|null); + + /** SubOpts topic. */ + public topic?: (string|null); + + /** SubOpts _subscribe. */ + public _subscribe?: "subscribe"; + + /** SubOpts _topic. */ + public _topic?: "topic"; + + /** + * Creates a new SubOpts instance using the specified properties. + * @param [properties] Properties to set + * @returns SubOpts instance + */ + public static create(properties?: RPC.ISubOpts): RPC.SubOpts; + + /** + * Encodes the specified SubOpts message. Does not implicitly {@link RPC.SubOpts.verify|verify} messages. + * @param message SubOpts message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.ISubOpts, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SubOpts message, length delimited. Does not implicitly {@link RPC.SubOpts.verify|verify} messages. + * @param message SubOpts message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.ISubOpts, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SubOpts message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SubOpts + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.SubOpts; + + /** + * Decodes a SubOpts message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SubOpts + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.SubOpts; + + /** + * Verifies a SubOpts message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SubOpts message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SubOpts + */ + public static fromObject(object: { [k: string]: any }): RPC.SubOpts; + + /** + * Creates a plain object from a SubOpts message. Also converts values to other types if specified. + * @param message SubOpts + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.SubOpts, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SubOpts to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SubOpts + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Message. */ + interface IMessage { + + /** Message from */ + from?: (Uint8Array|null); + + /** Message data */ + data?: (Uint8Array|null); + + /** Message seqno */ + seqno?: (Uint8Array|null); + + /** Message topic */ + topic: string; + + /** Message signature */ + signature?: (Uint8Array|null); + + /** Message key */ + key?: (Uint8Array|null); + } + + /** Represents a Message. */ + class Message implements IMessage { + + /** + * Constructs a new Message. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IMessage); + + /** Message from. */ + public from?: (Uint8Array|null); + + /** Message data. */ + public data?: (Uint8Array|null); + + /** Message seqno. */ + public seqno?: (Uint8Array|null); + + /** Message topic. */ + public topic: string; + + /** Message signature. */ + public signature?: (Uint8Array|null); + + /** Message key. */ + public key?: (Uint8Array|null); + + /** Message _from. */ + public _from?: "from"; + + /** Message _data. */ + public _data?: "data"; + + /** Message _seqno. */ + public _seqno?: "seqno"; + + /** Message _signature. */ + public _signature?: "signature"; + + /** Message _key. */ + public _key?: "key"; + + /** + * Creates a new Message instance using the specified properties. + * @param [properties] Properties to set + * @returns Message instance + */ + public static create(properties?: RPC.IMessage): RPC.Message; + + /** + * Encodes the specified Message message. Does not implicitly {@link RPC.Message.verify|verify} messages. + * @param message Message message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IMessage, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Message message, length delimited. Does not implicitly {@link RPC.Message.verify|verify} messages. + * @param message Message message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IMessage, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Message message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Message + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.Message; + + /** + * Decodes a Message message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Message + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.Message; + + /** + * Verifies a Message message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Message message from a plain object. 
Also converts values to their respective internal types. + * @param object Plain object + * @returns Message + */ + public static fromObject(object: { [k: string]: any }): RPC.Message; + + /** + * Creates a plain object from a Message message. Also converts values to other types if specified. + * @param message Message + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.Message, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Message to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Message + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ControlMessage. */ + interface IControlMessage { + + /** ControlMessage ihave */ + ihave?: (RPC.IControlIHave[]|null); + + /** ControlMessage iwant */ + iwant?: (RPC.IControlIWant[]|null); + + /** ControlMessage graft */ + graft?: (RPC.IControlGraft[]|null); + + /** ControlMessage prune */ + prune?: (RPC.IControlPrune[]|null); + } + + /** Represents a ControlMessage. */ + class ControlMessage implements IControlMessage { + + /** + * Constructs a new ControlMessage. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IControlMessage); + + /** ControlMessage ihave. */ + public ihave: RPC.IControlIHave[]; + + /** ControlMessage iwant. */ + public iwant: RPC.IControlIWant[]; + + /** ControlMessage graft. */ + public graft: RPC.IControlGraft[]; + + /** ControlMessage prune. */ + public prune: RPC.IControlPrune[]; + + /** + * Creates a new ControlMessage instance using the specified properties. + * @param [properties] Properties to set + * @returns ControlMessage instance + */ + public static create(properties?: RPC.IControlMessage): RPC.ControlMessage; + + /** + * Encodes the specified ControlMessage message. Does not implicitly {@link RPC.ControlMessage.verify|verify} messages. + * @param message ControlMessage message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IControlMessage, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ControlMessage message, length delimited. Does not implicitly {@link RPC.ControlMessage.verify|verify} messages. + * @param message ControlMessage message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IControlMessage, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ControlMessage message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ControlMessage + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.ControlMessage; + + /** + * Decodes a ControlMessage message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns ControlMessage + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.ControlMessage; + + /** + * Verifies a ControlMessage message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ControlMessage message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ControlMessage + */ + public static fromObject(object: { [k: string]: any }): RPC.ControlMessage; + + /** + * Creates a plain object from a ControlMessage message. Also converts values to other types if specified. + * @param message ControlMessage + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.ControlMessage, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ControlMessage to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ControlMessage + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ControlIHave. */ + interface IControlIHave { + + /** ControlIHave topicID */ + topicID?: (string|null); + + /** ControlIHave messageIDs */ + messageIDs?: (Uint8Array[]|null); + } + + /** Represents a ControlIHave. */ + class ControlIHave implements IControlIHave { + + /** + * Constructs a new ControlIHave. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IControlIHave); + + /** ControlIHave topicID. */ + public topicID?: (string|null); + + /** ControlIHave messageIDs. */ + public messageIDs: Uint8Array[]; + + /** ControlIHave _topicID. */ + public _topicID?: "topicID"; + + /** + * Creates a new ControlIHave instance using the specified properties. + * @param [properties] Properties to set + * @returns ControlIHave instance + */ + public static create(properties?: RPC.IControlIHave): RPC.ControlIHave; + + /** + * Encodes the specified ControlIHave message. Does not implicitly {@link RPC.ControlIHave.verify|verify} messages. + * @param message ControlIHave message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IControlIHave, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ControlIHave message, length delimited. Does not implicitly {@link RPC.ControlIHave.verify|verify} messages. + * @param message ControlIHave message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IControlIHave, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ControlIHave message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ControlIHave + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.ControlIHave; + + /** + * Decodes a ControlIHave message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ControlIHave + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.ControlIHave; + + /** + * Verifies a ControlIHave message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ControlIHave message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ControlIHave + */ + public static fromObject(object: { [k: string]: any }): RPC.ControlIHave; + + /** + * Creates a plain object from a ControlIHave message. Also converts values to other types if specified. + * @param message ControlIHave + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.ControlIHave, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ControlIHave to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ControlIHave + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ControlIWant. */ + interface IControlIWant { + + /** ControlIWant messageIDs */ + messageIDs?: (Uint8Array[]|null); + } + + /** Represents a ControlIWant. */ + class ControlIWant implements IControlIWant { + + /** + * Constructs a new ControlIWant. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IControlIWant); + + /** ControlIWant messageIDs. */ + public messageIDs: Uint8Array[]; + + /** + * Creates a new ControlIWant instance using the specified properties. + * @param [properties] Properties to set + * @returns ControlIWant instance + */ + public static create(properties?: RPC.IControlIWant): RPC.ControlIWant; + + /** + * Encodes the specified ControlIWant message. Does not implicitly {@link RPC.ControlIWant.verify|verify} messages. + * @param message ControlIWant message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IControlIWant, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ControlIWant message, length delimited. Does not implicitly {@link RPC.ControlIWant.verify|verify} messages. + * @param message ControlIWant message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IControlIWant, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ControlIWant message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ControlIWant + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.ControlIWant; + + /** + * Decodes a ControlIWant message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ControlIWant + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.ControlIWant; + + /** + * Verifies a ControlIWant message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ControlIWant message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ControlIWant + */ + public static fromObject(object: { [k: string]: any }): RPC.ControlIWant; + + /** + * Creates a plain object from a ControlIWant message. Also converts values to other types if specified. + * @param message ControlIWant + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.ControlIWant, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ControlIWant to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ControlIWant + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ControlGraft. */ + interface IControlGraft { + + /** ControlGraft topicID */ + topicID?: (string|null); + } + + /** Represents a ControlGraft. */ + class ControlGraft implements IControlGraft { + + /** + * Constructs a new ControlGraft. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IControlGraft); + + /** ControlGraft topicID. */ + public topicID?: (string|null); + + /** ControlGraft _topicID. */ + public _topicID?: "topicID"; + + /** + * Creates a new ControlGraft instance using the specified properties. + * @param [properties] Properties to set + * @returns ControlGraft instance + */ + public static create(properties?: RPC.IControlGraft): RPC.ControlGraft; + + /** + * Encodes the specified ControlGraft message. Does not implicitly {@link RPC.ControlGraft.verify|verify} messages. + * @param message ControlGraft message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IControlGraft, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ControlGraft message, length delimited. Does not implicitly {@link RPC.ControlGraft.verify|verify} messages. + * @param message ControlGraft message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IControlGraft, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ControlGraft message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ControlGraft + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.ControlGraft; + + /** + * Decodes a ControlGraft message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ControlGraft + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.ControlGraft; + + /** + * Verifies a ControlGraft message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ControlGraft message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ControlGraft + */ + public static fromObject(object: { [k: string]: any }): RPC.ControlGraft; + + /** + * Creates a plain object from a ControlGraft message. Also converts values to other types if specified. + * @param message ControlGraft + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.ControlGraft, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ControlGraft to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ControlGraft + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ControlPrune. */ + interface IControlPrune { + + /** ControlPrune topicID */ + topicID?: (string|null); + + /** ControlPrune peers */ + peers?: (RPC.IPeerInfo[]|null); + + /** ControlPrune backoff */ + backoff?: (number|Long|null); + } + + /** Represents a ControlPrune. */ + class ControlPrune implements IControlPrune { + + /** + * Constructs a new ControlPrune. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IControlPrune); + + /** ControlPrune topicID. */ + public topicID?: (string|null); + + /** ControlPrune peers. */ + public peers: RPC.IPeerInfo[]; + + /** ControlPrune backoff. */ + public backoff?: (number|Long|null); + + /** ControlPrune _topicID. */ + public _topicID?: "topicID"; + + /** ControlPrune _backoff. */ + public _backoff?: "backoff"; + + /** + * Creates a new ControlPrune instance using the specified properties. + * @param [properties] Properties to set + * @returns ControlPrune instance + */ + public static create(properties?: RPC.IControlPrune): RPC.ControlPrune; + + /** + * Encodes the specified ControlPrune message. Does not implicitly {@link RPC.ControlPrune.verify|verify} messages. + * @param message ControlPrune message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IControlPrune, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ControlPrune message, length delimited. Does not implicitly {@link RPC.ControlPrune.verify|verify} messages. 
+ * @param message ControlPrune message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IControlPrune, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ControlPrune message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ControlPrune + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.ControlPrune; + + /** + * Decodes a ControlPrune message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ControlPrune + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.ControlPrune; + + /** + * Verifies a ControlPrune message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ControlPrune message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ControlPrune + */ + public static fromObject(object: { [k: string]: any }): RPC.ControlPrune; + + /** + * Creates a plain object from a ControlPrune message. Also converts values to other types if specified. + * @param message ControlPrune + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.ControlPrune, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ControlPrune to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ControlPrune + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a PeerInfo. */ + interface IPeerInfo { + + /** PeerInfo peerID */ + peerID?: (Uint8Array|null); + + /** PeerInfo signedPeerRecord */ + signedPeerRecord?: (Uint8Array|null); + } + + /** Represents a PeerInfo. */ + class PeerInfo implements IPeerInfo { + + /** + * Constructs a new PeerInfo. + * @param [properties] Properties to set + */ + constructor(properties?: RPC.IPeerInfo); + + /** PeerInfo peerID. */ + public peerID?: (Uint8Array|null); + + /** PeerInfo signedPeerRecord. */ + public signedPeerRecord?: (Uint8Array|null); + + /** PeerInfo _peerID. */ + public _peerID?: "peerID"; + + /** PeerInfo _signedPeerRecord. */ + public _signedPeerRecord?: "signedPeerRecord"; + + /** + * Creates a new PeerInfo instance using the specified properties. + * @param [properties] Properties to set + * @returns PeerInfo instance + */ + public static create(properties?: RPC.IPeerInfo): RPC.PeerInfo; + + /** + * Encodes the specified PeerInfo message. Does not implicitly {@link RPC.PeerInfo.verify|verify} messages. 
+ * @param message PeerInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: RPC.IPeerInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PeerInfo message, length delimited. Does not implicitly {@link RPC.PeerInfo.verify|verify} messages. + * @param message PeerInfo message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: RPC.IPeerInfo, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PeerInfo message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PeerInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): RPC.PeerInfo; + + /** + * Decodes a PeerInfo message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PeerInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): RPC.PeerInfo; + + /** + * Verifies a PeerInfo message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PeerInfo message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PeerInfo + */ + public static fromObject(object: { [k: string]: any }): RPC.PeerInfo; + + /** + * Creates a plain object from a PeerInfo message. Also converts values to other types if specified. + * @param message PeerInfo + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: RPC.PeerInfo, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PeerInfo to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PeerInfo + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } +} diff --git a/packages/protons-benchmark/src/protobufjs/rpc.js b/packages/protons-benchmark/src/protobufjs/rpc.js new file mode 100644 index 0000000..9f6c59a --- /dev/null +++ b/packages/protons-benchmark/src/protobufjs/rpc.js @@ -0,0 +1,2643 @@ +/*eslint-disable*/ +// @ts-nocheck + +import $protobuf from "protobufjs/minimal.js"; + +// Common aliases +const $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + +// Exported root namespace +const $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + +export const RPC = $root.RPC = (() => { + + /** + * Properties of a RPC. + * @exports IRPC + * @interface IRPC + * @property {Array.|null} [subscriptions] RPC subscriptions + * @property {Array.|null} [messages] RPC messages + * @property {RPC.IControlMessage|null} [control] RPC control + */ + + /** + * Constructs a new RPC. + * @exports RPC + * @classdesc Represents a RPC. 
+ * @implements IRPC + * @constructor + * @param {IRPC=} [properties] Properties to set + */ + function RPC(properties) { + this.subscriptions = []; + this.messages = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * RPC subscriptions. + * @member {Array.} subscriptions + * @memberof RPC + * @instance + */ + RPC.prototype.subscriptions = $util.emptyArray; + + /** + * RPC messages. + * @member {Array.} messages + * @memberof RPC + * @instance + */ + RPC.prototype.messages = $util.emptyArray; + + /** + * RPC control. + * @member {RPC.IControlMessage|null|undefined} control + * @memberof RPC + * @instance + */ + RPC.prototype.control = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * RPC _control. + * @member {"control"|undefined} _control + * @memberof RPC + * @instance + */ + Object.defineProperty(RPC.prototype, "_control", { + get: $util.oneOfGetter($oneOfFields = ["control"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new RPC instance using the specified properties. + * @function create + * @memberof RPC + * @static + * @param {IRPC=} [properties] Properties to set + * @returns {RPC} RPC instance + */ + RPC.create = function create(properties) { + return new RPC(properties); + }; + + /** + * Encodes the specified RPC message. Does not implicitly {@link RPC.verify|verify} messages. + * @function encode + * @memberof RPC + * @static + * @param {IRPC} message RPC message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RPC.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.subscriptions != null && message.subscriptions.length) + for (let i = 0; i < message.subscriptions.length; ++i) + $root.RPC.SubOpts.encode(message.subscriptions[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.messages != null && message.messages.length) + for (let i = 0; i < message.messages.length; ++i) + $root.RPC.Message.encode(message.messages[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.control != null && Object.hasOwnProperty.call(message, "control")) + $root.RPC.ControlMessage.encode(message.control, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified RPC message, length delimited. Does not implicitly {@link RPC.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC + * @static + * @param {IRPC} message RPC message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + RPC.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a RPC message from the specified reader or buffer. + * @function decode + * @memberof RPC + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC} RPC + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RPC.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.RPC(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.subscriptions && message.subscriptions.length)) + message.subscriptions = []; + message.subscriptions.push($root.RPC.SubOpts.decode(reader, reader.uint32())); + break; + } + case 2: { + if (!(message.messages && message.messages.length)) + message.messages = []; + message.messages.push($root.RPC.Message.decode(reader, reader.uint32())); + break; + } + case 3: { + message.control = $root.RPC.ControlMessage.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a RPC message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC} RPC + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + RPC.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a RPC message. + * @function verify + * @memberof RPC + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + RPC.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.subscriptions != null && message.hasOwnProperty("subscriptions")) { + if (!Array.isArray(message.subscriptions)) + return "subscriptions: array expected"; + for (let i = 0; i < message.subscriptions.length; ++i) { + let error = $root.RPC.SubOpts.verify(message.subscriptions[i]); + if (error) + return "subscriptions." + error; + } + } + if (message.messages != null && message.hasOwnProperty("messages")) { + if (!Array.isArray(message.messages)) + return "messages: array expected"; + for (let i = 0; i < message.messages.length; ++i) { + let error = $root.RPC.Message.verify(message.messages[i]); + if (error) + return "messages." + error; + } + } + if (message.control != null && message.hasOwnProperty("control")) { + properties._control = 1; + { + let error = $root.RPC.ControlMessage.verify(message.control); + if (error) + return "control." + error; + } + } + return null; + }; + + /** + * Creates a RPC message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof RPC + * @static + * @param {Object.} object Plain object + * @returns {RPC} RPC + */ + RPC.fromObject = function fromObject(object) { + if (object instanceof $root.RPC) + return object; + let message = new $root.RPC(); + if (object.subscriptions) { + if (!Array.isArray(object.subscriptions)) + throw TypeError(".RPC.subscriptions: array expected"); + message.subscriptions = []; + for (let i = 0; i < object.subscriptions.length; ++i) { + if (typeof object.subscriptions[i] !== "object") + throw TypeError(".RPC.subscriptions: object expected"); + message.subscriptions[i] = $root.RPC.SubOpts.fromObject(object.subscriptions[i]); + } + } + if (object.messages) { + if (!Array.isArray(object.messages)) + throw TypeError(".RPC.messages: array expected"); + message.messages = []; + for (let i = 0; i < object.messages.length; ++i) { + if (typeof object.messages[i] !== "object") + throw TypeError(".RPC.messages: object expected"); + message.messages[i] = $root.RPC.Message.fromObject(object.messages[i]); + } + } + if (object.control != null) { + if (typeof object.control !== "object") + throw TypeError(".RPC.control: object expected"); + message.control = $root.RPC.ControlMessage.fromObject(object.control); + } + return message; + }; + + /** + * Creates a plain object from a RPC message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC + * @static + * @param {RPC} message RPC + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + RPC.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) { + object.subscriptions = []; + object.messages = []; + } + if (message.subscriptions && message.subscriptions.length) { + object.subscriptions = []; + for (let j = 0; j < message.subscriptions.length; ++j) + object.subscriptions[j] = $root.RPC.SubOpts.toObject(message.subscriptions[j], options); + } + if (message.messages && message.messages.length) { + object.messages = []; + for (let j = 0; j < message.messages.length; ++j) + object.messages[j] = $root.RPC.Message.toObject(message.messages[j], options); + } + if (message.control != null && message.hasOwnProperty("control")) { + object.control = $root.RPC.ControlMessage.toObject(message.control, options); + if (options.oneofs) + object._control = "control"; + } + return object; + }; + + /** + * Converts this RPC to JSON. + * @function toJSON + * @memberof RPC + * @instance + * @returns {Object.} JSON object + */ + RPC.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for RPC + * @function getTypeUrl + * @memberof RPC + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + RPC.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC"; + }; + + RPC.SubOpts = (function() { + + /** + * Properties of a SubOpts. + * @memberof RPC + * @interface ISubOpts + * @property {boolean|null} [subscribe] SubOpts subscribe + * @property {string|null} [topic] SubOpts topic + */ + + /** + * Constructs a new SubOpts. + * @memberof RPC + * @classdesc Represents a SubOpts. 
+ * @implements ISubOpts + * @constructor + * @param {RPC.ISubOpts=} [properties] Properties to set + */ + function SubOpts(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SubOpts subscribe. + * @member {boolean|null|undefined} subscribe + * @memberof RPC.SubOpts + * @instance + */ + SubOpts.prototype.subscribe = null; + + /** + * SubOpts topic. + * @member {string|null|undefined} topic + * @memberof RPC.SubOpts + * @instance + */ + SubOpts.prototype.topic = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * SubOpts _subscribe. + * @member {"subscribe"|undefined} _subscribe + * @memberof RPC.SubOpts + * @instance + */ + Object.defineProperty(SubOpts.prototype, "_subscribe", { + get: $util.oneOfGetter($oneOfFields = ["subscribe"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * SubOpts _topic. + * @member {"topic"|undefined} _topic + * @memberof RPC.SubOpts + * @instance + */ + Object.defineProperty(SubOpts.prototype, "_topic", { + get: $util.oneOfGetter($oneOfFields = ["topic"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new SubOpts instance using the specified properties. + * @function create + * @memberof RPC.SubOpts + * @static + * @param {RPC.ISubOpts=} [properties] Properties to set + * @returns {RPC.SubOpts} SubOpts instance + */ + SubOpts.create = function create(properties) { + return new SubOpts(properties); + }; + + /** + * Encodes the specified SubOpts message. Does not implicitly {@link RPC.SubOpts.verify|verify} messages. + * @function encode + * @memberof RPC.SubOpts + * @static + * @param {RPC.ISubOpts} message SubOpts message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SubOpts.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.subscribe != null && Object.hasOwnProperty.call(message, "subscribe")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.subscribe); + if (message.topic != null && Object.hasOwnProperty.call(message, "topic")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.topic); + return writer; + }; + + /** + * Encodes the specified SubOpts message, length delimited. Does not implicitly {@link RPC.SubOpts.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.SubOpts + * @static + * @param {RPC.ISubOpts} message SubOpts message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SubOpts.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SubOpts message from the specified reader or buffer. + * @function decode + * @memberof RPC.SubOpts + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.SubOpts} SubOpts + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SubOpts.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.RPC.SubOpts(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.subscribe = reader.bool(); + break; + } + case 2: { + message.topic = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SubOpts message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC.SubOpts + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.SubOpts} SubOpts + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SubOpts.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SubOpts message. + * @function verify + * @memberof RPC.SubOpts + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SubOpts.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.subscribe != null && message.hasOwnProperty("subscribe")) { + properties._subscribe = 1; + if (typeof message.subscribe !== "boolean") + return "subscribe: boolean expected"; + } + if (message.topic != null && message.hasOwnProperty("topic")) { + properties._topic = 1; + if (!$util.isString(message.topic)) + return "topic: string expected"; + } + return null; + }; + + /** + * Creates a SubOpts message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof RPC.SubOpts + * @static + * @param {Object.} object Plain object + * @returns {RPC.SubOpts} SubOpts + */ + SubOpts.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.SubOpts) + return object; + let message = new $root.RPC.SubOpts(); + if (object.subscribe != null) + message.subscribe = Boolean(object.subscribe); + if (object.topic != null) + message.topic = String(object.topic); + return message; + }; + + /** + * Creates a plain object from a SubOpts message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.SubOpts + * @static + * @param {RPC.SubOpts} message SubOpts + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SubOpts.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (message.subscribe != null && message.hasOwnProperty("subscribe")) { + object.subscribe = message.subscribe; + if (options.oneofs) + object._subscribe = "subscribe"; + } + if (message.topic != null && message.hasOwnProperty("topic")) { + object.topic = message.topic; + if (options.oneofs) + object._topic = "topic"; + } + return object; + }; + + /** + * Converts this SubOpts to JSON. 
+ * @function toJSON + * @memberof RPC.SubOpts + * @instance + * @returns {Object.} JSON object + */ + SubOpts.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SubOpts + * @function getTypeUrl + * @memberof RPC.SubOpts + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SubOpts.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.SubOpts"; + }; + + return SubOpts; + })(); + + RPC.Message = (function() { + + /** + * Properties of a Message. + * @memberof RPC + * @interface IMessage + * @property {Uint8Array|null} [from] Message from + * @property {Uint8Array|null} [data] Message data + * @property {Uint8Array|null} [seqno] Message seqno + * @property {string} topic Message topic + * @property {Uint8Array|null} [signature] Message signature + * @property {Uint8Array|null} [key] Message key + */ + + /** + * Constructs a new Message. + * @memberof RPC + * @classdesc Represents a Message. + * @implements IMessage + * @constructor + * @param {RPC.IMessage=} [properties] Properties to set + */ + function Message(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Message from. + * @member {Uint8Array|null|undefined} from + * @memberof RPC.Message + * @instance + */ + Message.prototype.from = null; + + /** + * Message data. + * @member {Uint8Array|null|undefined} data + * @memberof RPC.Message + * @instance + */ + Message.prototype.data = null; + + /** + * Message seqno. + * @member {Uint8Array|null|undefined} seqno + * @memberof RPC.Message + * @instance + */ + Message.prototype.seqno = null; + + /** + * Message topic. + * @member {string} topic + * @memberof RPC.Message + * @instance + */ + Message.prototype.topic = ""; + + /** + * Message signature. + * @member {Uint8Array|null|undefined} signature + * @memberof RPC.Message + * @instance + */ + Message.prototype.signature = null; + + /** + * Message key. + * @member {Uint8Array|null|undefined} key + * @memberof RPC.Message + * @instance + */ + Message.prototype.key = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * Message _from. + * @member {"from"|undefined} _from + * @memberof RPC.Message + * @instance + */ + Object.defineProperty(Message.prototype, "_from", { + get: $util.oneOfGetter($oneOfFields = ["from"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Message _data. + * @member {"data"|undefined} _data + * @memberof RPC.Message + * @instance + */ + Object.defineProperty(Message.prototype, "_data", { + get: $util.oneOfGetter($oneOfFields = ["data"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Message _seqno. + * @member {"seqno"|undefined} _seqno + * @memberof RPC.Message + * @instance + */ + Object.defineProperty(Message.prototype, "_seqno", { + get: $util.oneOfGetter($oneOfFields = ["seqno"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Message _signature. 
+ * @member {"signature"|undefined} _signature + * @memberof RPC.Message + * @instance + */ + Object.defineProperty(Message.prototype, "_signature", { + get: $util.oneOfGetter($oneOfFields = ["signature"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Message _key. + * @member {"key"|undefined} _key + * @memberof RPC.Message + * @instance + */ + Object.defineProperty(Message.prototype, "_key", { + get: $util.oneOfGetter($oneOfFields = ["key"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new Message instance using the specified properties. + * @function create + * @memberof RPC.Message + * @static + * @param {RPC.IMessage=} [properties] Properties to set + * @returns {RPC.Message} Message instance + */ + Message.create = function create(properties) { + return new Message(properties); + }; + + /** + * Encodes the specified Message message. Does not implicitly {@link RPC.Message.verify|verify} messages. + * @function encode + * @memberof RPC.Message + * @static + * @param {RPC.IMessage} message Message message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Message.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.from != null && Object.hasOwnProperty.call(message, "from")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.from); + if (message.data != null && Object.hasOwnProperty.call(message, "data")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.data); + if (message.seqno != null && Object.hasOwnProperty.call(message, "seqno")) + writer.uint32(/* id 3, wireType 2 =*/26).bytes(message.seqno); + writer.uint32(/* id 4, wireType 2 =*/34).string(message.topic); + if (message.signature != null && Object.hasOwnProperty.call(message, "signature")) + writer.uint32(/* id 5, wireType 2 =*/42).bytes(message.signature); + if (message.key != null && Object.hasOwnProperty.call(message, "key")) + writer.uint32(/* id 6, wireType 2 =*/50).bytes(message.key); + return writer; + }; + + /** + * Encodes the specified Message message, length delimited. Does not implicitly {@link RPC.Message.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.Message + * @static + * @param {RPC.IMessage} message Message message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Message.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Message message from the specified reader or buffer. + * @function decode + * @memberof RPC.Message + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.Message} Message + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Message.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.RPC.Message(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.from = reader.bytes(); + break; + } + case 2: { + message.data = reader.bytes(); + break; + } + case 3: { + message.seqno = reader.bytes(); + break; + } + case 4: { + message.topic = reader.string(); + break; + } + case 5: { + message.signature = reader.bytes(); + break; + } + case 6: { + message.key = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + if (!message.hasOwnProperty("topic")) + throw $util.ProtocolError("missing required 'topic'", { instance: message }); + return message; + }; + + /** + * Decodes a Message message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC.Message + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.Message} Message + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Message.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Message message. + * @function verify + * @memberof RPC.Message + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Message.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.from != null && message.hasOwnProperty("from")) { + properties._from = 1; + if (!(message.from && typeof message.from.length === "number" || $util.isString(message.from))) + return "from: buffer expected"; + } + if (message.data != null && message.hasOwnProperty("data")) { + properties._data = 1; + if (!(message.data && typeof message.data.length === "number" || $util.isString(message.data))) + return "data: buffer expected"; + } + if (message.seqno != null && message.hasOwnProperty("seqno")) { + properties._seqno = 1; + if (!(message.seqno && typeof message.seqno.length === "number" || $util.isString(message.seqno))) + return "seqno: buffer expected"; + } + if (!$util.isString(message.topic)) + return "topic: string expected"; + if (message.signature != null && message.hasOwnProperty("signature")) { + properties._signature = 1; + if (!(message.signature && typeof message.signature.length === "number" || $util.isString(message.signature))) + return "signature: buffer expected"; + } + if (message.key != null && message.hasOwnProperty("key")) { + properties._key = 1; + if (!(message.key && typeof message.key.length === "number" || $util.isString(message.key))) + return "key: buffer expected"; + } + return null; + }; + + /** + * Creates a Message message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof RPC.Message + * @static + * @param {Object.} object Plain object + * @returns {RPC.Message} Message + */ + Message.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.Message) + return object; + let message = new $root.RPC.Message(); + if (object.from != null) + if (typeof object.from === "string") + $util.base64.decode(object.from, message.from = $util.newBuffer($util.base64.length(object.from)), 0); + else if (object.from.length >= 0) + message.from = object.from; + if (object.data != null) + if (typeof object.data === "string") + $util.base64.decode(object.data, message.data = $util.newBuffer($util.base64.length(object.data)), 0); + else if (object.data.length >= 0) + message.data = object.data; + if (object.seqno != null) + if (typeof object.seqno === "string") + $util.base64.decode(object.seqno, message.seqno = $util.newBuffer($util.base64.length(object.seqno)), 0); + else if (object.seqno.length >= 0) + message.seqno = object.seqno; + if (object.topic != null) + message.topic = String(object.topic); + if (object.signature != null) + if (typeof object.signature === "string") + $util.base64.decode(object.signature, message.signature = $util.newBuffer($util.base64.length(object.signature)), 0); + else if (object.signature.length >= 0) + message.signature = object.signature; + if (object.key != null) + if (typeof object.key === "string") + $util.base64.decode(object.key, message.key = $util.newBuffer($util.base64.length(object.key)), 0); + else if (object.key.length >= 0) + message.key = object.key; + return message; + }; + + /** + * Creates a plain object from a Message message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.Message + * @static + * @param {RPC.Message} message Message + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Message.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.defaults) + object.topic = ""; + if (message.from != null && message.hasOwnProperty("from")) { + object.from = options.bytes === String ? $util.base64.encode(message.from, 0, message.from.length) : options.bytes === Array ? Array.prototype.slice.call(message.from) : message.from; + if (options.oneofs) + object._from = "from"; + } + if (message.data != null && message.hasOwnProperty("data")) { + object.data = options.bytes === String ? $util.base64.encode(message.data, 0, message.data.length) : options.bytes === Array ? Array.prototype.slice.call(message.data) : message.data; + if (options.oneofs) + object._data = "data"; + } + if (message.seqno != null && message.hasOwnProperty("seqno")) { + object.seqno = options.bytes === String ? $util.base64.encode(message.seqno, 0, message.seqno.length) : options.bytes === Array ? Array.prototype.slice.call(message.seqno) : message.seqno; + if (options.oneofs) + object._seqno = "seqno"; + } + if (message.topic != null && message.hasOwnProperty("topic")) + object.topic = message.topic; + if (message.signature != null && message.hasOwnProperty("signature")) { + object.signature = options.bytes === String ? $util.base64.encode(message.signature, 0, message.signature.length) : options.bytes === Array ? Array.prototype.slice.call(message.signature) : message.signature; + if (options.oneofs) + object._signature = "signature"; + } + if (message.key != null && message.hasOwnProperty("key")) { + object.key = options.bytes === String ? 
$util.base64.encode(message.key, 0, message.key.length) : options.bytes === Array ? Array.prototype.slice.call(message.key) : message.key; + if (options.oneofs) + object._key = "key"; + } + return object; + }; + + /** + * Converts this Message to JSON. + * @function toJSON + * @memberof RPC.Message + * @instance + * @returns {Object.} JSON object + */ + Message.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Message + * @function getTypeUrl + * @memberof RPC.Message + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Message.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.Message"; + }; + + return Message; + })(); + + RPC.ControlMessage = (function() { + + /** + * Properties of a ControlMessage. + * @memberof RPC + * @interface IControlMessage + * @property {Array.|null} [ihave] ControlMessage ihave + * @property {Array.|null} [iwant] ControlMessage iwant + * @property {Array.|null} [graft] ControlMessage graft + * @property {Array.|null} [prune] ControlMessage prune + */ + + /** + * Constructs a new ControlMessage. + * @memberof RPC + * @classdesc Represents a ControlMessage. + * @implements IControlMessage + * @constructor + * @param {RPC.IControlMessage=} [properties] Properties to set + */ + function ControlMessage(properties) { + this.ihave = []; + this.iwant = []; + this.graft = []; + this.prune = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ControlMessage ihave. + * @member {Array.} ihave + * @memberof RPC.ControlMessage + * @instance + */ + ControlMessage.prototype.ihave = $util.emptyArray; + + /** + * ControlMessage iwant. + * @member {Array.} iwant + * @memberof RPC.ControlMessage + * @instance + */ + ControlMessage.prototype.iwant = $util.emptyArray; + + /** + * ControlMessage graft. + * @member {Array.} graft + * @memberof RPC.ControlMessage + * @instance + */ + ControlMessage.prototype.graft = $util.emptyArray; + + /** + * ControlMessage prune. + * @member {Array.} prune + * @memberof RPC.ControlMessage + * @instance + */ + ControlMessage.prototype.prune = $util.emptyArray; + + /** + * Creates a new ControlMessage instance using the specified properties. + * @function create + * @memberof RPC.ControlMessage + * @static + * @param {RPC.IControlMessage=} [properties] Properties to set + * @returns {RPC.ControlMessage} ControlMessage instance + */ + ControlMessage.create = function create(properties) { + return new ControlMessage(properties); + }; + + /** + * Encodes the specified ControlMessage message. Does not implicitly {@link RPC.ControlMessage.verify|verify} messages. 
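+ * @example
+ * // Illustrative sketch (not generator output); the payload shown is assumed.
+ * // encode() returns a Writer, so call finish() to obtain the encoded bytes.
+ * const bytes = RPC.ControlMessage.encode({
+ *     ihave: [], iwant: [], graft: [], prune: []
+ * }).finish();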
+ * @function encode + * @memberof RPC.ControlMessage + * @static + * @param {RPC.IControlMessage} message ControlMessage message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlMessage.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.ihave != null && message.ihave.length) + for (let i = 0; i < message.ihave.length; ++i) + $root.RPC.ControlIHave.encode(message.ihave[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.iwant != null && message.iwant.length) + for (let i = 0; i < message.iwant.length; ++i) + $root.RPC.ControlIWant.encode(message.iwant[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.graft != null && message.graft.length) + for (let i = 0; i < message.graft.length; ++i) + $root.RPC.ControlGraft.encode(message.graft[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.prune != null && message.prune.length) + for (let i = 0; i < message.prune.length; ++i) + $root.RPC.ControlPrune.encode(message.prune[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ControlMessage message, length delimited. Does not implicitly {@link RPC.ControlMessage.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.ControlMessage + * @static + * @param {RPC.IControlMessage} message ControlMessage message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlMessage.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ControlMessage message from the specified reader or buffer. + * @function decode + * @memberof RPC.ControlMessage + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.ControlMessage} ControlMessage + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlMessage.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.RPC.ControlMessage(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.ihave && message.ihave.length)) + message.ihave = []; + message.ihave.push($root.RPC.ControlIHave.decode(reader, reader.uint32())); + break; + } + case 2: { + if (!(message.iwant && message.iwant.length)) + message.iwant = []; + message.iwant.push($root.RPC.ControlIWant.decode(reader, reader.uint32())); + break; + } + case 3: { + if (!(message.graft && message.graft.length)) + message.graft = []; + message.graft.push($root.RPC.ControlGraft.decode(reader, reader.uint32())); + break; + } + case 4: { + if (!(message.prune && message.prune.length)) + message.prune = []; + message.prune.push($root.RPC.ControlPrune.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ControlMessage message from the specified reader or buffer, length delimited. 
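+ * @example
+ * // Illustrative sketch (not generator output); "buf" is assumed to hold a
+ * // varint-length-prefixed payload, e.g. produced by encodeDelimited(...).finish().
+ * const msg = RPC.ControlMessage.decodeDelimited(buf);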
+ * @function decodeDelimited + * @memberof RPC.ControlMessage + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.ControlMessage} ControlMessage + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlMessage.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ControlMessage message. + * @function verify + * @memberof RPC.ControlMessage + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ControlMessage.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.ihave != null && message.hasOwnProperty("ihave")) { + if (!Array.isArray(message.ihave)) + return "ihave: array expected"; + for (let i = 0; i < message.ihave.length; ++i) { + let error = $root.RPC.ControlIHave.verify(message.ihave[i]); + if (error) + return "ihave." + error; + } + } + if (message.iwant != null && message.hasOwnProperty("iwant")) { + if (!Array.isArray(message.iwant)) + return "iwant: array expected"; + for (let i = 0; i < message.iwant.length; ++i) { + let error = $root.RPC.ControlIWant.verify(message.iwant[i]); + if (error) + return "iwant." + error; + } + } + if (message.graft != null && message.hasOwnProperty("graft")) { + if (!Array.isArray(message.graft)) + return "graft: array expected"; + for (let i = 0; i < message.graft.length; ++i) { + let error = $root.RPC.ControlGraft.verify(message.graft[i]); + if (error) + return "graft." + error; + } + } + if (message.prune != null && message.hasOwnProperty("prune")) { + if (!Array.isArray(message.prune)) + return "prune: array expected"; + for (let i = 0; i < message.prune.length; ++i) { + let error = $root.RPC.ControlPrune.verify(message.prune[i]); + if (error) + return "prune." + error; + } + } + return null; + }; + + /** + * Creates a ControlMessage message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof RPC.ControlMessage + * @static + * @param {Object.} object Plain object + * @returns {RPC.ControlMessage} ControlMessage + */ + ControlMessage.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.ControlMessage) + return object; + let message = new $root.RPC.ControlMessage(); + if (object.ihave) { + if (!Array.isArray(object.ihave)) + throw TypeError(".RPC.ControlMessage.ihave: array expected"); + message.ihave = []; + for (let i = 0; i < object.ihave.length; ++i) { + if (typeof object.ihave[i] !== "object") + throw TypeError(".RPC.ControlMessage.ihave: object expected"); + message.ihave[i] = $root.RPC.ControlIHave.fromObject(object.ihave[i]); + } + } + if (object.iwant) { + if (!Array.isArray(object.iwant)) + throw TypeError(".RPC.ControlMessage.iwant: array expected"); + message.iwant = []; + for (let i = 0; i < object.iwant.length; ++i) { + if (typeof object.iwant[i] !== "object") + throw TypeError(".RPC.ControlMessage.iwant: object expected"); + message.iwant[i] = $root.RPC.ControlIWant.fromObject(object.iwant[i]); + } + } + if (object.graft) { + if (!Array.isArray(object.graft)) + throw TypeError(".RPC.ControlMessage.graft: array expected"); + message.graft = []; + for (let i = 0; i < object.graft.length; ++i) { + if (typeof object.graft[i] !== "object") + throw TypeError(".RPC.ControlMessage.graft: object expected"); + message.graft[i] = $root.RPC.ControlGraft.fromObject(object.graft[i]); + } + } + if (object.prune) { + if (!Array.isArray(object.prune)) + throw TypeError(".RPC.ControlMessage.prune: array expected"); + message.prune = []; + for (let i = 0; i < object.prune.length; ++i) { + if (typeof object.prune[i] !== "object") + throw TypeError(".RPC.ControlMessage.prune: object expected"); + message.prune[i] = $root.RPC.ControlPrune.fromObject(object.prune[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a ControlMessage message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.ControlMessage + * @static + * @param {RPC.ControlMessage} message ControlMessage + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ControlMessage.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) { + object.ihave = []; + object.iwant = []; + object.graft = []; + object.prune = []; + } + if (message.ihave && message.ihave.length) { + object.ihave = []; + for (let j = 0; j < message.ihave.length; ++j) + object.ihave[j] = $root.RPC.ControlIHave.toObject(message.ihave[j], options); + } + if (message.iwant && message.iwant.length) { + object.iwant = []; + for (let j = 0; j < message.iwant.length; ++j) + object.iwant[j] = $root.RPC.ControlIWant.toObject(message.iwant[j], options); + } + if (message.graft && message.graft.length) { + object.graft = []; + for (let j = 0; j < message.graft.length; ++j) + object.graft[j] = $root.RPC.ControlGraft.toObject(message.graft[j], options); + } + if (message.prune && message.prune.length) { + object.prune = []; + for (let j = 0; j < message.prune.length; ++j) + object.prune[j] = $root.RPC.ControlPrune.toObject(message.prune[j], options); + } + return object; + }; + + /** + * Converts this ControlMessage to JSON. 
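+ * @example
+ * // Illustrative sketch (not generator output); "msg" is assumed to be a decoded
+ * // RPC.ControlMessage. JSON.stringify() picks up toJSON() automatically.
+ * const json = JSON.stringify(msg);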
+ * @function toJSON + * @memberof RPC.ControlMessage + * @instance + * @returns {Object.} JSON object + */ + ControlMessage.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ControlMessage + * @function getTypeUrl + * @memberof RPC.ControlMessage + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ControlMessage.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.ControlMessage"; + }; + + return ControlMessage; + })(); + + RPC.ControlIHave = (function() { + + /** + * Properties of a ControlIHave. + * @memberof RPC + * @interface IControlIHave + * @property {string|null} [topicID] ControlIHave topicID + * @property {Array.|null} [messageIDs] ControlIHave messageIDs + */ + + /** + * Constructs a new ControlIHave. + * @memberof RPC + * @classdesc Represents a ControlIHave. + * @implements IControlIHave + * @constructor + * @param {RPC.IControlIHave=} [properties] Properties to set + */ + function ControlIHave(properties) { + this.messageIDs = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ControlIHave topicID. + * @member {string|null|undefined} topicID + * @memberof RPC.ControlIHave + * @instance + */ + ControlIHave.prototype.topicID = null; + + /** + * ControlIHave messageIDs. + * @member {Array.} messageIDs + * @memberof RPC.ControlIHave + * @instance + */ + ControlIHave.prototype.messageIDs = $util.emptyArray; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * ControlIHave _topicID. + * @member {"topicID"|undefined} _topicID + * @memberof RPC.ControlIHave + * @instance + */ + Object.defineProperty(ControlIHave.prototype, "_topicID", { + get: $util.oneOfGetter($oneOfFields = ["topicID"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ControlIHave instance using the specified properties. + * @function create + * @memberof RPC.ControlIHave + * @static + * @param {RPC.IControlIHave=} [properties] Properties to set + * @returns {RPC.ControlIHave} ControlIHave instance + */ + ControlIHave.create = function create(properties) { + return new ControlIHave(properties); + }; + + /** + * Encodes the specified ControlIHave message. Does not implicitly {@link RPC.ControlIHave.verify|verify} messages. + * @function encode + * @memberof RPC.ControlIHave + * @static + * @param {RPC.IControlIHave} message ControlIHave message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlIHave.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.topicID != null && Object.hasOwnProperty.call(message, "topicID")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.topicID); + if (message.messageIDs != null && message.messageIDs.length) + for (let i = 0; i < message.messageIDs.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.messageIDs[i]); + return writer; + }; + + /** + * Encodes the specified ControlIHave message, length delimited. Does not implicitly {@link RPC.ControlIHave.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof RPC.ControlIHave + * @static + * @param {RPC.IControlIHave} message ControlIHave message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlIHave.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ControlIHave message from the specified reader or buffer. + * @function decode + * @memberof RPC.ControlIHave + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.ControlIHave} ControlIHave + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlIHave.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.RPC.ControlIHave(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.topicID = reader.string(); + break; + } + case 2: { + if (!(message.messageIDs && message.messageIDs.length)) + message.messageIDs = []; + message.messageIDs.push(reader.bytes()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ControlIHave message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC.ControlIHave + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.ControlIHave} ControlIHave + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlIHave.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ControlIHave message. + * @function verify + * @memberof RPC.ControlIHave + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ControlIHave.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.topicID != null && message.hasOwnProperty("topicID")) { + properties._topicID = 1; + if (!$util.isString(message.topicID)) + return "topicID: string expected"; + } + if (message.messageIDs != null && message.hasOwnProperty("messageIDs")) { + if (!Array.isArray(message.messageIDs)) + return "messageIDs: array expected"; + for (let i = 0; i < message.messageIDs.length; ++i) + if (!(message.messageIDs[i] && typeof message.messageIDs[i].length === "number" || $util.isString(message.messageIDs[i]))) + return "messageIDs: buffer[] expected"; + } + return null; + }; + + /** + * Creates a ControlIHave message from a plain object. Also converts values to their respective internal types. 
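+ * @example
+ * // Illustrative sketch (not generator output); sample values are assumed.
+ * // messageIDs entries given as base64 strings are converted to buffers.
+ * const ihave = RPC.ControlIHave.fromObject({
+ *     topicID: "blocks",
+ *     messageIDs: ["AQID"]
+ * });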
+ * @function fromObject + * @memberof RPC.ControlIHave + * @static + * @param {Object.} object Plain object + * @returns {RPC.ControlIHave} ControlIHave + */ + ControlIHave.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.ControlIHave) + return object; + let message = new $root.RPC.ControlIHave(); + if (object.topicID != null) + message.topicID = String(object.topicID); + if (object.messageIDs) { + if (!Array.isArray(object.messageIDs)) + throw TypeError(".RPC.ControlIHave.messageIDs: array expected"); + message.messageIDs = []; + for (let i = 0; i < object.messageIDs.length; ++i) + if (typeof object.messageIDs[i] === "string") + $util.base64.decode(object.messageIDs[i], message.messageIDs[i] = $util.newBuffer($util.base64.length(object.messageIDs[i])), 0); + else if (object.messageIDs[i].length >= 0) + message.messageIDs[i] = object.messageIDs[i]; + } + return message; + }; + + /** + * Creates a plain object from a ControlIHave message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.ControlIHave + * @static + * @param {RPC.ControlIHave} message ControlIHave + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ControlIHave.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) + object.messageIDs = []; + if (message.topicID != null && message.hasOwnProperty("topicID")) { + object.topicID = message.topicID; + if (options.oneofs) + object._topicID = "topicID"; + } + if (message.messageIDs && message.messageIDs.length) { + object.messageIDs = []; + for (let j = 0; j < message.messageIDs.length; ++j) + object.messageIDs[j] = options.bytes === String ? $util.base64.encode(message.messageIDs[j], 0, message.messageIDs[j].length) : options.bytes === Array ? Array.prototype.slice.call(message.messageIDs[j]) : message.messageIDs[j]; + } + return object; + }; + + /** + * Converts this ControlIHave to JSON. + * @function toJSON + * @memberof RPC.ControlIHave + * @instance + * @returns {Object.} JSON object + */ + ControlIHave.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ControlIHave + * @function getTypeUrl + * @memberof RPC.ControlIHave + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ControlIHave.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.ControlIHave"; + }; + + return ControlIHave; + })(); + + RPC.ControlIWant = (function() { + + /** + * Properties of a ControlIWant. + * @memberof RPC + * @interface IControlIWant + * @property {Array.|null} [messageIDs] ControlIWant messageIDs + */ + + /** + * Constructs a new ControlIWant. + * @memberof RPC + * @classdesc Represents a ControlIWant. + * @implements IControlIWant + * @constructor + * @param {RPC.IControlIWant=} [properties] Properties to set + */ + function ControlIWant(properties) { + this.messageIDs = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ControlIWant messageIDs. 
+ * @member {Array.} messageIDs + * @memberof RPC.ControlIWant + * @instance + */ + ControlIWant.prototype.messageIDs = $util.emptyArray; + + /** + * Creates a new ControlIWant instance using the specified properties. + * @function create + * @memberof RPC.ControlIWant + * @static + * @param {RPC.IControlIWant=} [properties] Properties to set + * @returns {RPC.ControlIWant} ControlIWant instance + */ + ControlIWant.create = function create(properties) { + return new ControlIWant(properties); + }; + + /** + * Encodes the specified ControlIWant message. Does not implicitly {@link RPC.ControlIWant.verify|verify} messages. + * @function encode + * @memberof RPC.ControlIWant + * @static + * @param {RPC.IControlIWant} message ControlIWant message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlIWant.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.messageIDs != null && message.messageIDs.length) + for (let i = 0; i < message.messageIDs.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.messageIDs[i]); + return writer; + }; + + /** + * Encodes the specified ControlIWant message, length delimited. Does not implicitly {@link RPC.ControlIWant.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.ControlIWant + * @static + * @param {RPC.IControlIWant} message ControlIWant message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlIWant.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ControlIWant message from the specified reader or buffer. + * @function decode + * @memberof RPC.ControlIWant + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.ControlIWant} ControlIWant + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlIWant.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.RPC.ControlIWant(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (!(message.messageIDs && message.messageIDs.length)) + message.messageIDs = []; + message.messageIDs.push(reader.bytes()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ControlIWant message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC.ControlIWant + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.ControlIWant} ControlIWant + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlIWant.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ControlIWant message. 
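+ * @example
+ * // Illustrative sketch (not generator output); the argument shown is assumed.
+ * // verify() returns null for valid input, otherwise a string describing the problem.
+ * const error = RPC.ControlIWant.verify({ messageIDs: [new Uint8Array([1, 2, 3])] });
+ * if (error) throw new Error(error);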
+ * @function verify + * @memberof RPC.ControlIWant + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ControlIWant.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.messageIDs != null && message.hasOwnProperty("messageIDs")) { + if (!Array.isArray(message.messageIDs)) + return "messageIDs: array expected"; + for (let i = 0; i < message.messageIDs.length; ++i) + if (!(message.messageIDs[i] && typeof message.messageIDs[i].length === "number" || $util.isString(message.messageIDs[i]))) + return "messageIDs: buffer[] expected"; + } + return null; + }; + + /** + * Creates a ControlIWant message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof RPC.ControlIWant + * @static + * @param {Object.} object Plain object + * @returns {RPC.ControlIWant} ControlIWant + */ + ControlIWant.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.ControlIWant) + return object; + let message = new $root.RPC.ControlIWant(); + if (object.messageIDs) { + if (!Array.isArray(object.messageIDs)) + throw TypeError(".RPC.ControlIWant.messageIDs: array expected"); + message.messageIDs = []; + for (let i = 0; i < object.messageIDs.length; ++i) + if (typeof object.messageIDs[i] === "string") + $util.base64.decode(object.messageIDs[i], message.messageIDs[i] = $util.newBuffer($util.base64.length(object.messageIDs[i])), 0); + else if (object.messageIDs[i].length >= 0) + message.messageIDs[i] = object.messageIDs[i]; + } + return message; + }; + + /** + * Creates a plain object from a ControlIWant message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.ControlIWant + * @static + * @param {RPC.ControlIWant} message ControlIWant + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ControlIWant.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) + object.messageIDs = []; + if (message.messageIDs && message.messageIDs.length) { + object.messageIDs = []; + for (let j = 0; j < message.messageIDs.length; ++j) + object.messageIDs[j] = options.bytes === String ? $util.base64.encode(message.messageIDs[j], 0, message.messageIDs[j].length) : options.bytes === Array ? Array.prototype.slice.call(message.messageIDs[j]) : message.messageIDs[j]; + } + return object; + }; + + /** + * Converts this ControlIWant to JSON. + * @function toJSON + * @memberof RPC.ControlIWant + * @instance + * @returns {Object.} JSON object + */ + ControlIWant.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ControlIWant + * @function getTypeUrl + * @memberof RPC.ControlIWant + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ControlIWant.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.ControlIWant"; + }; + + return ControlIWant; + })(); + + RPC.ControlGraft = (function() { + + /** + * Properties of a ControlGraft. 
+ * @memberof RPC + * @interface IControlGraft + * @property {string|null} [topicID] ControlGraft topicID + */ + + /** + * Constructs a new ControlGraft. + * @memberof RPC + * @classdesc Represents a ControlGraft. + * @implements IControlGraft + * @constructor + * @param {RPC.IControlGraft=} [properties] Properties to set + */ + function ControlGraft(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ControlGraft topicID. + * @member {string|null|undefined} topicID + * @memberof RPC.ControlGraft + * @instance + */ + ControlGraft.prototype.topicID = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * ControlGraft _topicID. + * @member {"topicID"|undefined} _topicID + * @memberof RPC.ControlGraft + * @instance + */ + Object.defineProperty(ControlGraft.prototype, "_topicID", { + get: $util.oneOfGetter($oneOfFields = ["topicID"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ControlGraft instance using the specified properties. + * @function create + * @memberof RPC.ControlGraft + * @static + * @param {RPC.IControlGraft=} [properties] Properties to set + * @returns {RPC.ControlGraft} ControlGraft instance + */ + ControlGraft.create = function create(properties) { + return new ControlGraft(properties); + }; + + /** + * Encodes the specified ControlGraft message. Does not implicitly {@link RPC.ControlGraft.verify|verify} messages. + * @function encode + * @memberof RPC.ControlGraft + * @static + * @param {RPC.IControlGraft} message ControlGraft message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlGraft.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.topicID != null && Object.hasOwnProperty.call(message, "topicID")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.topicID); + return writer; + }; + + /** + * Encodes the specified ControlGraft message, length delimited. Does not implicitly {@link RPC.ControlGraft.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.ControlGraft + * @static + * @param {RPC.IControlGraft} message ControlGraft message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlGraft.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ControlGraft message from the specified reader or buffer. + * @function decode + * @memberof RPC.ControlGraft + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.ControlGraft} ControlGraft + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlGraft.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.RPC.ControlGraft(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.topicID = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ControlGraft message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC.ControlGraft + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.ControlGraft} ControlGraft + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlGraft.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ControlGraft message. + * @function verify + * @memberof RPC.ControlGraft + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ControlGraft.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.topicID != null && message.hasOwnProperty("topicID")) { + properties._topicID = 1; + if (!$util.isString(message.topicID)) + return "topicID: string expected"; + } + return null; + }; + + /** + * Creates a ControlGraft message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof RPC.ControlGraft + * @static + * @param {Object.} object Plain object + * @returns {RPC.ControlGraft} ControlGraft + */ + ControlGraft.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.ControlGraft) + return object; + let message = new $root.RPC.ControlGraft(); + if (object.topicID != null) + message.topicID = String(object.topicID); + return message; + }; + + /** + * Creates a plain object from a ControlGraft message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.ControlGraft + * @static + * @param {RPC.ControlGraft} message ControlGraft + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ControlGraft.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (message.topicID != null && message.hasOwnProperty("topicID")) { + object.topicID = message.topicID; + if (options.oneofs) + object._topicID = "topicID"; + } + return object; + }; + + /** + * Converts this ControlGraft to JSON. 
+ * @function toJSON + * @memberof RPC.ControlGraft + * @instance + * @returns {Object.} JSON object + */ + ControlGraft.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ControlGraft + * @function getTypeUrl + * @memberof RPC.ControlGraft + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ControlGraft.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.ControlGraft"; + }; + + return ControlGraft; + })(); + + RPC.ControlPrune = (function() { + + /** + * Properties of a ControlPrune. + * @memberof RPC + * @interface IControlPrune + * @property {string|null} [topicID] ControlPrune topicID + * @property {Array.|null} [peers] ControlPrune peers + * @property {number|Long|null} [backoff] ControlPrune backoff + */ + + /** + * Constructs a new ControlPrune. + * @memberof RPC + * @classdesc Represents a ControlPrune. + * @implements IControlPrune + * @constructor + * @param {RPC.IControlPrune=} [properties] Properties to set + */ + function ControlPrune(properties) { + this.peers = []; + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ControlPrune topicID. + * @member {string|null|undefined} topicID + * @memberof RPC.ControlPrune + * @instance + */ + ControlPrune.prototype.topicID = null; + + /** + * ControlPrune peers. + * @member {Array.} peers + * @memberof RPC.ControlPrune + * @instance + */ + ControlPrune.prototype.peers = $util.emptyArray; + + /** + * ControlPrune backoff. + * @member {number|Long|null|undefined} backoff + * @memberof RPC.ControlPrune + * @instance + */ + ControlPrune.prototype.backoff = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * ControlPrune _topicID. + * @member {"topicID"|undefined} _topicID + * @memberof RPC.ControlPrune + * @instance + */ + Object.defineProperty(ControlPrune.prototype, "_topicID", { + get: $util.oneOfGetter($oneOfFields = ["topicID"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * ControlPrune _backoff. + * @member {"backoff"|undefined} _backoff + * @memberof RPC.ControlPrune + * @instance + */ + Object.defineProperty(ControlPrune.prototype, "_backoff", { + get: $util.oneOfGetter($oneOfFields = ["backoff"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ControlPrune instance using the specified properties. + * @function create + * @memberof RPC.ControlPrune + * @static + * @param {RPC.IControlPrune=} [properties] Properties to set + * @returns {RPC.ControlPrune} ControlPrune instance + */ + ControlPrune.create = function create(properties) { + return new ControlPrune(properties); + }; + + /** + * Encodes the specified ControlPrune message. Does not implicitly {@link RPC.ControlPrune.verify|verify} messages. 
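+ * @example
+ * // Illustrative sketch (not generator output); sample values are assumed.
+ * // "backoff" is a uint64 field, so plain numbers and Long values are both accepted.
+ * const bytes = RPC.ControlPrune.encode({
+ *     topicID: "blocks",
+ *     peers: [],
+ *     backoff: 60
+ * }).finish();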
+ * @function encode + * @memberof RPC.ControlPrune + * @static + * @param {RPC.IControlPrune} message ControlPrune message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlPrune.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.topicID != null && Object.hasOwnProperty.call(message, "topicID")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.topicID); + if (message.peers != null && message.peers.length) + for (let i = 0; i < message.peers.length; ++i) + $root.RPC.PeerInfo.encode(message.peers[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.backoff != null && Object.hasOwnProperty.call(message, "backoff")) + writer.uint32(/* id 3, wireType 0 =*/24).uint64(message.backoff); + return writer; + }; + + /** + * Encodes the specified ControlPrune message, length delimited. Does not implicitly {@link RPC.ControlPrune.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.ControlPrune + * @static + * @param {RPC.IControlPrune} message ControlPrune message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ControlPrune.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ControlPrune message from the specified reader or buffer. + * @function decode + * @memberof RPC.ControlPrune + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.ControlPrune} ControlPrune + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlPrune.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.RPC.ControlPrune(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.topicID = reader.string(); + break; + } + case 2: { + if (!(message.peers && message.peers.length)) + message.peers = []; + message.peers.push($root.RPC.PeerInfo.decode(reader, reader.uint32())); + break; + } + case 3: { + message.backoff = reader.uint64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ControlPrune message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof RPC.ControlPrune + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.ControlPrune} ControlPrune + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ControlPrune.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ControlPrune message. 
+ * @function verify + * @memberof RPC.ControlPrune + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ControlPrune.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.topicID != null && message.hasOwnProperty("topicID")) { + properties._topicID = 1; + if (!$util.isString(message.topicID)) + return "topicID: string expected"; + } + if (message.peers != null && message.hasOwnProperty("peers")) { + if (!Array.isArray(message.peers)) + return "peers: array expected"; + for (let i = 0; i < message.peers.length; ++i) { + let error = $root.RPC.PeerInfo.verify(message.peers[i]); + if (error) + return "peers." + error; + } + } + if (message.backoff != null && message.hasOwnProperty("backoff")) { + properties._backoff = 1; + if (!$util.isInteger(message.backoff) && !(message.backoff && $util.isInteger(message.backoff.low) && $util.isInteger(message.backoff.high))) + return "backoff: integer|Long expected"; + } + return null; + }; + + /** + * Creates a ControlPrune message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof RPC.ControlPrune + * @static + * @param {Object.} object Plain object + * @returns {RPC.ControlPrune} ControlPrune + */ + ControlPrune.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.ControlPrune) + return object; + let message = new $root.RPC.ControlPrune(); + if (object.topicID != null) + message.topicID = String(object.topicID); + if (object.peers) { + if (!Array.isArray(object.peers)) + throw TypeError(".RPC.ControlPrune.peers: array expected"); + message.peers = []; + for (let i = 0; i < object.peers.length; ++i) { + if (typeof object.peers[i] !== "object") + throw TypeError(".RPC.ControlPrune.peers: object expected"); + message.peers[i] = $root.RPC.PeerInfo.fromObject(object.peers[i]); + } + } + if (object.backoff != null) + if ($util.Long) + (message.backoff = $util.Long.fromValue(object.backoff)).unsigned = true; + else if (typeof object.backoff === "string") + message.backoff = parseInt(object.backoff, 10); + else if (typeof object.backoff === "number") + message.backoff = object.backoff; + else if (typeof object.backoff === "object") + message.backoff = new $util.LongBits(object.backoff.low >>> 0, object.backoff.high >>> 0).toNumber(true); + return message; + }; + + /** + * Creates a plain object from a ControlPrune message. Also converts values to other types if specified. 
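+ * @example
+ * // Illustrative sketch (not generator output); "msg" is assumed to be a decoded
+ * // RPC.ControlPrune. With these options the uint64 "backoff" becomes a decimal
+ * // string and bytes fields become base64 strings.
+ * const obj = RPC.ControlPrune.toObject(msg, { longs: String, bytes: String });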
+ * @function toObject + * @memberof RPC.ControlPrune + * @static + * @param {RPC.ControlPrune} message ControlPrune + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ControlPrune.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (options.arrays || options.defaults) + object.peers = []; + if (message.topicID != null && message.hasOwnProperty("topicID")) { + object.topicID = message.topicID; + if (options.oneofs) + object._topicID = "topicID"; + } + if (message.peers && message.peers.length) { + object.peers = []; + for (let j = 0; j < message.peers.length; ++j) + object.peers[j] = $root.RPC.PeerInfo.toObject(message.peers[j], options); + } + if (message.backoff != null && message.hasOwnProperty("backoff")) { + if (typeof message.backoff === "number") + object.backoff = options.longs === String ? String(message.backoff) : message.backoff; + else + object.backoff = options.longs === String ? $util.Long.prototype.toString.call(message.backoff) : options.longs === Number ? new $util.LongBits(message.backoff.low >>> 0, message.backoff.high >>> 0).toNumber(true) : message.backoff; + if (options.oneofs) + object._backoff = "backoff"; + } + return object; + }; + + /** + * Converts this ControlPrune to JSON. + * @function toJSON + * @memberof RPC.ControlPrune + * @instance + * @returns {Object.} JSON object + */ + ControlPrune.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ControlPrune + * @function getTypeUrl + * @memberof RPC.ControlPrune + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ControlPrune.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.ControlPrune"; + }; + + return ControlPrune; + })(); + + RPC.PeerInfo = (function() { + + /** + * Properties of a PeerInfo. + * @memberof RPC + * @interface IPeerInfo + * @property {Uint8Array|null} [peerID] PeerInfo peerID + * @property {Uint8Array|null} [signedPeerRecord] PeerInfo signedPeerRecord + */ + + /** + * Constructs a new PeerInfo. + * @memberof RPC + * @classdesc Represents a PeerInfo. + * @implements IPeerInfo + * @constructor + * @param {RPC.IPeerInfo=} [properties] Properties to set + */ + function PeerInfo(properties) { + if (properties) + for (let keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * PeerInfo peerID. + * @member {Uint8Array|null|undefined} peerID + * @memberof RPC.PeerInfo + * @instance + */ + PeerInfo.prototype.peerID = null; + + /** + * PeerInfo signedPeerRecord. + * @member {Uint8Array|null|undefined} signedPeerRecord + * @memberof RPC.PeerInfo + * @instance + */ + PeerInfo.prototype.signedPeerRecord = null; + + // OneOf field names bound to virtual getters and setters + let $oneOfFields; + + /** + * PeerInfo _peerID. + * @member {"peerID"|undefined} _peerID + * @memberof RPC.PeerInfo + * @instance + */ + Object.defineProperty(PeerInfo.prototype, "_peerID", { + get: $util.oneOfGetter($oneOfFields = ["peerID"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * PeerInfo _signedPeerRecord. 
+ * @member {"signedPeerRecord"|undefined} _signedPeerRecord + * @memberof RPC.PeerInfo + * @instance + */ + Object.defineProperty(PeerInfo.prototype, "_signedPeerRecord", { + get: $util.oneOfGetter($oneOfFields = ["signedPeerRecord"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new PeerInfo instance using the specified properties. + * @function create + * @memberof RPC.PeerInfo + * @static + * @param {RPC.IPeerInfo=} [properties] Properties to set + * @returns {RPC.PeerInfo} PeerInfo instance + */ + PeerInfo.create = function create(properties) { + return new PeerInfo(properties); + }; + + /** + * Encodes the specified PeerInfo message. Does not implicitly {@link RPC.PeerInfo.verify|verify} messages. + * @function encode + * @memberof RPC.PeerInfo + * @static + * @param {RPC.IPeerInfo} message PeerInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PeerInfo.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.peerID != null && Object.hasOwnProperty.call(message, "peerID")) + writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.peerID); + if (message.signedPeerRecord != null && Object.hasOwnProperty.call(message, "signedPeerRecord")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.signedPeerRecord); + return writer; + }; + + /** + * Encodes the specified PeerInfo message, length delimited. Does not implicitly {@link RPC.PeerInfo.verify|verify} messages. + * @function encodeDelimited + * @memberof RPC.PeerInfo + * @static + * @param {RPC.IPeerInfo} message PeerInfo message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PeerInfo.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PeerInfo message from the specified reader or buffer. + * @function decode + * @memberof RPC.PeerInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {RPC.PeerInfo} PeerInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PeerInfo.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + let end = length === undefined ? reader.len : reader.pos + length, message = new $root.RPC.PeerInfo(); + while (reader.pos < end) { + let tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.peerID = reader.bytes(); + break; + } + case 2: { + message.signedPeerRecord = reader.bytes(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PeerInfo message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof RPC.PeerInfo + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {RPC.PeerInfo} PeerInfo + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PeerInfo.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PeerInfo message. + * @function verify + * @memberof RPC.PeerInfo + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PeerInfo.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + let properties = {}; + if (message.peerID != null && message.hasOwnProperty("peerID")) { + properties._peerID = 1; + if (!(message.peerID && typeof message.peerID.length === "number" || $util.isString(message.peerID))) + return "peerID: buffer expected"; + } + if (message.signedPeerRecord != null && message.hasOwnProperty("signedPeerRecord")) { + properties._signedPeerRecord = 1; + if (!(message.signedPeerRecord && typeof message.signedPeerRecord.length === "number" || $util.isString(message.signedPeerRecord))) + return "signedPeerRecord: buffer expected"; + } + return null; + }; + + /** + * Creates a PeerInfo message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof RPC.PeerInfo + * @static + * @param {Object.} object Plain object + * @returns {RPC.PeerInfo} PeerInfo + */ + PeerInfo.fromObject = function fromObject(object) { + if (object instanceof $root.RPC.PeerInfo) + return object; + let message = new $root.RPC.PeerInfo(); + if (object.peerID != null) + if (typeof object.peerID === "string") + $util.base64.decode(object.peerID, message.peerID = $util.newBuffer($util.base64.length(object.peerID)), 0); + else if (object.peerID.length >= 0) + message.peerID = object.peerID; + if (object.signedPeerRecord != null) + if (typeof object.signedPeerRecord === "string") + $util.base64.decode(object.signedPeerRecord, message.signedPeerRecord = $util.newBuffer($util.base64.length(object.signedPeerRecord)), 0); + else if (object.signedPeerRecord.length >= 0) + message.signedPeerRecord = object.signedPeerRecord; + return message; + }; + + /** + * Creates a plain object from a PeerInfo message. Also converts values to other types if specified. + * @function toObject + * @memberof RPC.PeerInfo + * @static + * @param {RPC.PeerInfo} message PeerInfo + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PeerInfo.toObject = function toObject(message, options) { + if (!options) + options = {}; + let object = {}; + if (message.peerID != null && message.hasOwnProperty("peerID")) { + object.peerID = options.bytes === String ? $util.base64.encode(message.peerID, 0, message.peerID.length) : options.bytes === Array ? Array.prototype.slice.call(message.peerID) : message.peerID; + if (options.oneofs) + object._peerID = "peerID"; + } + if (message.signedPeerRecord != null && message.hasOwnProperty("signedPeerRecord")) { + object.signedPeerRecord = options.bytes === String ? $util.base64.encode(message.signedPeerRecord, 0, message.signedPeerRecord.length) : options.bytes === Array ? 
Array.prototype.slice.call(message.signedPeerRecord) : message.signedPeerRecord; + if (options.oneofs) + object._signedPeerRecord = "signedPeerRecord"; + } + return object; + }; + + /** + * Converts this PeerInfo to JSON. + * @function toJSON + * @memberof RPC.PeerInfo + * @instance + * @returns {Object.} JSON object + */ + PeerInfo.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PeerInfo + * @function getTypeUrl + * @memberof RPC.PeerInfo + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PeerInfo.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/RPC.PeerInfo"; + }; + + return PeerInfo; + })(); + + return RPC; +})(); + +export { $root as default }; diff --git a/packages/protons-benchmark/src/protons/rpc.ts b/packages/protons-benchmark/src/protons/rpc.ts new file mode 100644 index 0000000..571ed08 --- /dev/null +++ b/packages/protons-benchmark/src/protons/rpc.ts @@ -0,0 +1,746 @@ +/* eslint-disable import/export */ +/* eslint-disable @typescript-eslint/no-namespace */ + +import { encodeMessage, decodeMessage, message } from 'protons-runtime' +import type { Uint8ArrayList } from 'uint8arraylist' +import type { Codec } from 'protons-runtime' + +export interface RPC { + subscriptions: RPC.SubOpts[] + messages: RPC.Message[] + control?: RPC.ControlMessage +} + +export namespace RPC { + export interface SubOpts { + subscribe?: boolean + topic?: string + } + + export namespace SubOpts { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.subscribe != null) { + writer.uint32(8) + writer.bool(obj.subscribe) + } + + if (obj.topic != null) { + writer.uint32(18) + writer.string(obj.topic) + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.subscribe = reader.bool() + break + case 2: + obj.topic = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: SubOpts): Uint8Array => { + return encodeMessage(obj, SubOpts.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): SubOpts => { + return decodeMessage(buf, SubOpts.codec()) + } + } + + export interface Message { + from?: Uint8Array + data?: Uint8Array + seqno?: Uint8Array + topic: string + signature?: Uint8Array + key?: Uint8Array + } + + export namespace Message { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.from != null) { + writer.uint32(10) + writer.bytes(obj.from) + } + + if (obj.data != null) { + writer.uint32(18) + writer.bytes(obj.data) + } + + if (obj.seqno != null) { + writer.uint32(26) + writer.bytes(obj.seqno) + } + + if (obj.topic != null) { + writer.uint32(34) + writer.string(obj.topic) + } else { + throw new Error('Protocol error: required field "topic" was not found in object') + } + + if (obj.signature != null) { + writer.uint32(42) + writer.bytes(obj.signature) + } + + if (obj.key != null) { + writer.uint32(50) + writer.bytes(obj.key) + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.from = reader.bytes() + break + case 2: + obj.data = reader.bytes() + break + case 3: + obj.seqno = reader.bytes() + break + case 4: + obj.topic = reader.string() + break + case 5: + obj.signature = reader.bytes() + break + case 6: + obj.key = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + if (obj.topic == null) { + throw new Error('Protocol error: value for required field "topic" was not found in protobuf') + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Message): Uint8Array => { + return encodeMessage(obj, Message.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Message => { + return decodeMessage(buf, Message.codec()) + } + } + + export interface ControlMessage { + ihave: RPC.ControlIHave[] + iwant: RPC.ControlIWant[] + graft: RPC.ControlGraft[] + prune: RPC.ControlPrune[] + } + + export namespace ControlMessage { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.ihave != null) { + for (const value of obj.ihave) { + writer.uint32(10) + RPC.ControlIHave.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: required field "ihave" was not found in object') + } + + if (obj.iwant != null) { + for (const value of obj.iwant) { + writer.uint32(18) + RPC.ControlIWant.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: required field "iwant" was not found in object') + } + + if (obj.graft != null) { + for (const value of obj.graft) { + writer.uint32(26) + RPC.ControlGraft.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: 
required field "graft" was not found in object') + } + + if (obj.prune != null) { + for (const value of obj.prune) { + writer.uint32(34) + RPC.ControlPrune.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: required field "prune" was not found in object') + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.ihave = obj.ihave ?? [] + obj.ihave.push(RPC.ControlIHave.codec().decode(reader, reader.uint32())) + break + case 2: + obj.iwant = obj.iwant ?? [] + obj.iwant.push(RPC.ControlIWant.codec().decode(reader, reader.uint32())) + break + case 3: + obj.graft = obj.graft ?? [] + obj.graft.push(RPC.ControlGraft.codec().decode(reader, reader.uint32())) + break + case 4: + obj.prune = obj.prune ?? [] + obj.prune.push(RPC.ControlPrune.codec().decode(reader, reader.uint32())) + break + default: + reader.skipType(tag & 7) + break + } + } + + obj.ihave = obj.ihave ?? [] + obj.iwant = obj.iwant ?? [] + obj.graft = obj.graft ?? [] + obj.prune = obj.prune ?? [] + + if (obj.ihave == null) { + throw new Error('Protocol error: value for required field "ihave" was not found in protobuf') + } + + if (obj.iwant == null) { + throw new Error('Protocol error: value for required field "iwant" was not found in protobuf') + } + + if (obj.graft == null) { + throw new Error('Protocol error: value for required field "graft" was not found in protobuf') + } + + if (obj.prune == null) { + throw new Error('Protocol error: value for required field "prune" was not found in protobuf') + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: ControlMessage): Uint8Array => { + return encodeMessage(obj, ControlMessage.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ControlMessage => { + return decodeMessage(buf, ControlMessage.codec()) + } + } + + export interface ControlIHave { + topicID?: string + messageIDs: Uint8Array[] + } + + export namespace ControlIHave { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.topicID != null) { + writer.uint32(10) + writer.string(obj.topicID) + } + + if (obj.messageIDs != null) { + for (const value of obj.messageIDs) { + writer.uint32(18) + writer.bytes(value) + } + } else { + throw new Error('Protocol error: required field "messageIDs" was not found in object') + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.topicID = reader.string() + break + case 2: + obj.messageIDs = obj.messageIDs ?? [] + obj.messageIDs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } + + obj.messageIDs = obj.messageIDs ?? 
[] + + if (obj.messageIDs == null) { + throw new Error('Protocol error: value for required field "messageIDs" was not found in protobuf') + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: ControlIHave): Uint8Array => { + return encodeMessage(obj, ControlIHave.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ControlIHave => { + return decodeMessage(buf, ControlIHave.codec()) + } + } + + export interface ControlIWant { + messageIDs: Uint8Array[] + } + + export namespace ControlIWant { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.messageIDs != null) { + for (const value of obj.messageIDs) { + writer.uint32(10) + writer.bytes(value) + } + } else { + throw new Error('Protocol error: required field "messageIDs" was not found in object') + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.messageIDs = obj.messageIDs ?? [] + obj.messageIDs.push(reader.bytes()) + break + default: + reader.skipType(tag & 7) + break + } + } + + obj.messageIDs = obj.messageIDs ?? [] + + if (obj.messageIDs == null) { + throw new Error('Protocol error: value for required field "messageIDs" was not found in protobuf') + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: ControlIWant): Uint8Array => { + return encodeMessage(obj, ControlIWant.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ControlIWant => { + return decodeMessage(buf, ControlIWant.codec()) + } + } + + export interface ControlGraft { + topicID?: string + } + + export namespace ControlGraft { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.topicID != null) { + writer.uint32(10) + writer.string(obj.topicID) + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.topicID = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: ControlGraft): Uint8Array => { + return encodeMessage(obj, ControlGraft.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ControlGraft => { + return decodeMessage(buf, ControlGraft.codec()) + } + } + + export interface ControlPrune { + topicID?: string + peers: RPC.PeerInfo[] + backoff?: bigint + } + + export namespace ControlPrune { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.topicID != null) { + writer.uint32(10) + writer.string(obj.topicID) + } + + if (obj.peers != null) { + for (const value of obj.peers) { + writer.uint32(18) + RPC.PeerInfo.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: required field "peers" was not found in object') + } + + if (obj.backoff != null) { + writer.uint32(24) + writer.uint64(obj.backoff) + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.topicID = reader.string() + break + case 2: + obj.peers = obj.peers ?? [] + obj.peers.push(RPC.PeerInfo.codec().decode(reader, reader.uint32())) + break + case 3: + obj.backoff = reader.uint64() + break + default: + reader.skipType(tag & 7) + break + } + } + + obj.peers = obj.peers ?? [] + + if (obj.peers == null) { + throw new Error('Protocol error: value for required field "peers" was not found in protobuf') + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: ControlPrune): Uint8Array => { + return encodeMessage(obj, ControlPrune.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): ControlPrune => { + return decodeMessage(buf, ControlPrune.codec()) + } + } + + export interface PeerInfo { + peerID?: Uint8Array + signedPeerRecord?: Uint8Array + } + + export namespace PeerInfo { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.peerID != null) { + writer.uint32(10) + writer.bytes(obj.peerID) + } + + if (obj.signedPeerRecord != null) { + writer.uint32(18) + writer.bytes(obj.signedPeerRecord) + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.peerID = reader.bytes() + break + case 2: + obj.signedPeerRecord = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: PeerInfo): Uint8Array => { + return encodeMessage(obj, PeerInfo.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): PeerInfo => { + return decodeMessage(buf, PeerInfo.codec()) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, writer, opts = {}) => { + if (opts.lengthDelimited !== false) { + writer.fork() + } + + if (obj.subscriptions != null) { + for (const value of obj.subscriptions) { + writer.uint32(10) + RPC.SubOpts.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: required field "subscriptions" was not found in object') + } + + if (obj.messages != null) { + for (const value of obj.messages) { + writer.uint32(18) + RPC.Message.codec().encode(value, writer) + } + } else { + throw new Error('Protocol error: required field "messages" was not found in object') + } + + if (obj.control != null) { + writer.uint32(26) + RPC.ControlMessage.codec().encode(obj.control, writer) + } + + if (opts.lengthDelimited !== false) { + writer.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.subscriptions = obj.subscriptions ?? [] + obj.subscriptions.push(RPC.SubOpts.codec().decode(reader, reader.uint32())) + break + case 2: + obj.messages = obj.messages ?? [] + obj.messages.push(RPC.Message.codec().decode(reader, reader.uint32())) + break + case 3: + obj.control = RPC.ControlMessage.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + + obj.subscriptions = obj.subscriptions ?? [] + obj.messages = obj.messages ?? 
[] + + if (obj.subscriptions == null) { + throw new Error('Protocol error: value for required field "subscriptions" was not found in protobuf') + } + + if (obj.messages == null) { + throw new Error('Protocol error: value for required field "messages" was not found in protobuf') + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: RPC): Uint8Array => { + return encodeMessage(obj, RPC.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): RPC => { + return decodeMessage(buf, RPC.codec()) + } +} diff --git a/packages/protons-benchmark/src/rpc.proto b/packages/protons-benchmark/src/rpc.proto new file mode 100644 index 0000000..3ddce5b --- /dev/null +++ b/packages/protons-benchmark/src/rpc.proto @@ -0,0 +1,52 @@ +syntax = "proto3"; + +message RPC { + repeated SubOpts subscriptions = 1; + repeated Message messages = 2; + optional ControlMessage control = 3; + + message SubOpts { + optional bool subscribe = 1; // subscribe or unsubcribe + optional string topic = 2; + } + + message Message { + optional bytes from = 1; + optional bytes data = 2; + optional bytes seqno = 3; + required string topic = 4; + optional bytes signature = 5; + optional bytes key = 6; + } + + message ControlMessage { + repeated ControlIHave ihave = 1; + repeated ControlIWant iwant = 2; + repeated ControlGraft graft = 3; + repeated ControlPrune prune = 4; + } + + message ControlIHave { + optional string topicID = 1; + repeated bytes messageIDs = 2; + } + + message ControlIWant { + repeated bytes messageIDs = 1; + } + + message ControlGraft { + optional string topicID = 1; + } + + message ControlPrune { + optional string topicID = 1; + repeated PeerInfo peers = 2; + optional uint64 backoff = 3; + } + + message PeerInfo { + optional bytes peerID = 1; + optional bytes signedPeerRecord = 2; + } +} diff --git a/packages/protons-benchmark/src/rpc.ts b/packages/protons-benchmark/src/rpc.ts new file mode 100644 index 0000000..da897b2 --- /dev/null +++ b/packages/protons-benchmark/src/rpc.ts @@ -0,0 +1,49 @@ +/* eslint-disable no-console */ + +/* +$ node dist/src/index.js +$ npx playwright-test dist/src/index.js --runner benchmark +*/ + +import Benchmark from 'benchmark' +import { RPC as ProtonsRPC } from './protons/rpc.js' +import { RPC as ProtobufjsRPC } from './protobufjs/rpc.js' +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' + +const rpc = { + subscriptions: [], + messages: [ + { + topic: 'topic1', + // typical Attestation + data: uint8ArrayFromString( + 'e40000000a000000000000000a00000000000000a45c8daa336e17a150300afd4c717313c84f291754c51a378f20958083c5fa070a00000000000000a45c8daa336e17a150300afd4c717313c84f291754c51a378f20958083c5fa070a00000000000000a45c8daa336e17a150300afd4c717313c84f291754c51a378f20958083c5fa0795d2ef8ae4e2b4d1e5b3d5ce47b518e3db2c8c4d082e4498805ac2a686c69f248761b78437db2927470c1e77ede9c18606110faacbcbe4f13052bde7f7eff6aab09edf7bc4929fda2230f943aba2c47b6f940d350cb20c76fad4a8d40e2f3f1f01', + 'hex' + ), + signature: Uint8Array.from(Array.from({ length: 96 }, () => 100)) + } + ], + control: undefined +} + +const bytes = ProtobufjsRPC.encode(rpc).finish() + +new Benchmark.Suite() + .add('protons', () => { + ProtonsRPC.decode(bytes) + }) + .add('protobufjs', () => { + ProtobufjsRPC.decode(bytes) + }) + .on('error', (err: Error) => { + console.error(err) + }) + .on('cycle', (event: any) => { + console.info(String(event.target)) + }) + .on('complete', function () { + // @ts-expect-error types are wrong + console.info(`Fastest is 
${this.filter('fastest').map('name')}`) // eslint-disable-line @typescript-eslint/restrict-template-expressions + }) + // run async + .run({ async: true }) diff --git a/packages/protons-benchmark/tsconfig.json b/packages/protons-benchmark/tsconfig.json index dae6c30..86b3837 100644 --- a/packages/protons-benchmark/tsconfig.json +++ b/packages/protons-benchmark/tsconfig.json @@ -9,7 +9,8 @@ "test" ], "exclude": [ - "src/protobufjs/bench.js" + "src/protobufjs/bench.js", + "src/protobufjs/rpc.js" ], "references": [ { From d8f5c8d2feabf458cd8bf7e49e01a45faefc72b4 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 10 Aug 2022 07:18:54 +0100 Subject: [PATCH 14/14] chore: add missing dep --- packages/protons-benchmark/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/protons-benchmark/package.json b/packages/protons-benchmark/package.json index 43c0ce3..b861608 100644 --- a/packages/protons-benchmark/package.json +++ b/packages/protons-benchmark/package.json @@ -74,7 +74,8 @@ "pbjs": "^0.0.14", "protobufjs": "^7.0.0", "protons": "^4.0.0", - "protons-runtime": "^2.0.0" + "protons-runtime": "^2.0.0", + "uint8arrays": "^3.1.0" }, "private": true }
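
For illustration, a minimal round-trip sketch exercising the generated protons codec added in this patch series. This is an assumed usage example, not part of the patches: it imports the generated module from './protons/rpc.js' as the benchmark above does, and the object shape follows the rpc.proto definition (the generated encoder requires the repeated `subscriptions` and `messages` fields to be present, `control` is optional, and each message needs a `topic`).

import { RPC } from './protons/rpc.js'

// Object literal matching rpc.proto: required repeated fields must be
// present (empty arrays are fine), optional fields may be omitted.
const rpc = {
  subscriptions: [{ subscribe: true, topic: 'topic1' }],
  messages: [{
    topic: 'topic1',
    data: Uint8Array.from([1, 2, 3])
  }],
  control: undefined
}

// encode() returns a Uint8Array; decode() accepts a Uint8Array or Uint8ArrayList
const bytes = RPC.encode(rpc)
const decoded = RPC.decode(bytes)

console.info(decoded.messages[0].topic) // 'topic1'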