feat(json-crdt): 🎸 improve frontier decoding in LogDecoder
feat(json-crdt): 🎸 improve frontier decoding in LogDecoder
streamich committed Apr 2, 2024
1 parent ebbcd9a commit 9c0f531
Showing 4 changed files with 112 additions and 106 deletions.
42 changes: 14 additions & 28 deletions src/json-crdt/log/codec/LogDecoder.ts
@@ -71,47 +71,33 @@ export class LogDecoder {
   }
 
   public deserialize(components: types.LogComponentsWithFrontier, params: DeserializeParams = {}): DecodeResult {
-    const [view, metadata, model, history, ...frontier] = components;
+    const [view, metadata, model, , ...frontier] = components;
     const result: DecodeResult = {};
     if (params.view) result.view = view;
     if (params.history) result.history = this.deserializeHistory(components);
     if (params.frontier) {
+      if (!model) result.history = this.deserializeHistory(components);
       if (result.history) {
         result.frontier = result.history;
-      } else {
+      } else if (model) {
         const modelFormat = metadata[1];
-        let decodedModel: Model<any> | null = null;
-        if (model) {
+        const start = (): Model => {
           const isSidecar = modelFormat === FileModelEncoding.SidecarBinary;
           if (isSidecar) {
             const decoder = this.opts.sidecarDecoder;
             if (!decoder) throw new Error('NO_SIDECAR_DECODER');
             if (!(model instanceof Uint8Array)) throw new Error('NOT_BLOB');
-            decodedModel = decoder.decode(view, model);
-          } else {
-            decodedModel = this.deserializeModel(model);
-          }
-        }
-        let log: Log | null = null;
-        if (history) {
-          const [start, patches] = history;
-          if (start) {
-            log = new Log(() => this.deserializeModel(start));
-            for (const patch of patches) log.end.applyPatch(this.deserializePatch(patch));
+            return decoder.decode(view, model);
           }
-        }
-        if (!log) throw new Error('NO_HISTORY');
-        if (!decodedModel) decodedModel = log.replayToEnd();
-        if (frontier.length) {
-          for (const patch of frontier) {
-            const patchDecoded = this.deserializePatch(patch);
-            decodedModel.applyPatch(patchDecoded);
-            log.end.applyPatch(patchDecoded);
-          }
-        }
-        throw new Error('NOT_IMPLEMENTED');
-        // const file = new Log()
-        // return file;
+          return this.deserializeModel(model);
+        };
+        const log = new Log(start);
+        const end = log.end;
+        if (frontier && frontier.length)
+          for (const patch of frontier) end.applyPatch(this.deserializePatch(patch));
+        result.frontier = log;
+      } else {
+        throw new Error('NO_MODEL');
       }
     }
     return result;
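For orientation, a minimal sketch of the new frontier path in LogDecoder.deserialize(), mirroring the `.deserialize()` spec added below; the `setup` helper and its return shape are assumptions borrowed from that spec, not part of this diff:

// Hypothetical usage, assuming `setup` from the new LogDecoder.spec.ts below.
const {log, model, encoder, decoder} = setup({foo: 'bar'});
const clone = model.clone();
clone.api.obj([]).set({xyz: 123});
// Serialize the log, then append one extra patch as the frontier.
const components = encoder.serialize(log, {history: 'binary'});
components.push(clone.api.flush().toBinary());
// The decoder now constructs the frontier model from the `model` component
// (or the sidecar blob) and applies only the trailing frontier patches,
// instead of replaying the full history.
const {frontier} = decoder.deserialize(components, {frontier: true});
// frontier.end.view() → {foo: 'bar', xyz: 123}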
2 changes: 1 addition & 1 deletion src/json-crdt/log/codec/LogEncoder.ts
@@ -104,7 +104,7 @@ export class LogEncoder {
     return [params.noView ? null : log.end.view(), metadata, model, history];
   }
 
-  public toBinary(log: Log, params: EncodingParams): Uint8Array {
+  public encode(log: Log, params: EncodingParams): Uint8Array {
     const sequence = this.serialize(log, params);
     switch (params.format) {
       case 'ndjson': {
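`toBinary` is renamed to `encode` with an unchanged signature; a usage sketch under the same assumptions (the `setup` helper from the specs below):

// `encoder.toBinary(...)` becomes `encoder.encode(...)`; behavior is unchanged.
const {log, encoder, decoder} = setup({foo: 'bar'});
const blob = encoder.encode(log, {format: 'seq.cbor', model: 'binary', history: 'binary'});
const {frontier, history} = decoder.decode(blob, {format: 'seq.cbor', frontier: true, history: true});
// frontier.end.view() → {foo: 'bar'}; history.replayToEnd().view() → {foo: 'bar'}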
95 changes: 95 additions & 0 deletions src/json-crdt/log/codec/__tests__/LogDecoder.spec.ts
@@ -0,0 +1,95 @@
+import {Log} from '../../Log';
+import {Model} from '../../../model';
+import {logEncoderOpts} from '../logEncoderOpts';
+import {EncodingParams, LogEncoder} from '../LogEncoder';
+import {LogDecoder} from '../LogDecoder';
+import {logDecoderOpts} from '../logDecoderOpts';
+
+const setup = (view: unknown) => {
+  const model = Model.withServerClock();
+  model.api.root(view);
+  const log = Log.fromNewModel(model);
+  const encoder = new LogEncoder(logEncoderOpts);
+  const decoder = new LogDecoder(logDecoderOpts);
+  return {model, log, encoder, decoder};
+};
+
+describe('can decode from blob', () => {
+  test('.ndjson', () => {
+    const {log, encoder, decoder} = setup({foo: 'bar'});
+    const blob = encoder.encode(log, {format: 'ndjson', model: 'compact', history: 'compact'});
+    const decoded = decoder.decode(blob, {format: 'ndjson', frontier: true, history: true});
+    const {frontier, history} = decoded;
+    expect(frontier!.end.view()).toEqual({foo: 'bar'});
+    expect(frontier!.end !== log.end).toBe(true);
+    expect(history!.start().view()).toEqual(undefined);
+    expect(history!.end.view()).toEqual({foo: 'bar'});
+  });
+
+  test('.seq.cbor', () => {
+    const {log, encoder, decoder} = setup({foo: 'bar'});
+    const blob = encoder.encode(log, {format: 'seq.cbor', model: 'binary', history: 'binary'});
+    const decoded = decoder.decode(blob, {format: 'seq.cbor', frontier: true, history: true});
+    const {frontier, history} = decoded;
+    expect(frontier!.end.view()).toEqual({foo: 'bar'});
+    expect(frontier!.end !== log.end).toBe(true);
+    expect(history!.start().view()).toEqual(undefined);
+    expect(history!.end.view()).toEqual({foo: 'bar'});
+  });
+});
+
+const assertEncoding = (log: Log, params: EncodingParams) => {
+  const encoder = new LogEncoder(logEncoderOpts);
+  const decoder = new LogDecoder(logDecoderOpts);
+  const encoded = encoder.encode(log, params);
+  const decoded = decoder.decode(encoded, {
+    format: params.format,
+    frontier: true,
+    history: true,
+  });
+  expect(decoded.frontier!.end.view()).toEqual(log.end.view());
+  expect(decoded.frontier!.end !== log.end).toBe(true);
+  expect(decoded.history!.start().view()).toEqual(undefined);
+  expect(decoded.history!.replayToEnd().view()).toEqual(log.end.view());
+  expect(decoded.history!.patches.size()).toBe(log.patches.size());
+};
+
+describe('can encode/decode all format combinations', () => {
+  const formats: EncodingParams['format'][] = ['ndjson', 'seq.cbor'];
+  const modelFormats: EncodingParams['model'][] = ['sidecar', 'binary', 'compact', 'verbose'];
+  const historyFormats: EncodingParams['history'][] = ['binary', 'compact', 'verbose'];
+  const noViews = [true, false];
+  for (const format of formats) {
+    for (const model of modelFormats) {
+      for (const history of historyFormats) {
+        for (const noView of noViews) {
+          if (noView && model === 'sidecar') continue;
+          const params = {format, model, history, noView};
+          test(JSON.stringify(params), () => {
+            const {log} = setup({foo: 'bar'});
+            assertEncoding(log, params);
+          });
+        }
+      }
+    }
+  }
+});
+
+describe('.deserialize()', () => {
+  test('applies frontier', () => {
+    const {log, model, encoder, decoder} = setup({foo: 'bar'});
+    const clone = model.clone();
+    clone.api.obj([]).set({
+      xyz: 123,
+    });
+    const serialized = encoder.serialize(log, {
+      history: 'binary',
+    });
+    serialized.push(clone.api.flush().toBinary());
+    expect(log.end.view()).toEqual({foo: 'bar'});
+    const deserialized1 = decoder.deserialize(serialized, {frontier: true});
+    const deserialized2 = decoder.deserialize(serialized, {history: true});
+    expect(deserialized1.frontier!.end.view()).toEqual({foo: 'bar', xyz: 123});
+    expect(deserialized2.history!.end.view()).toEqual({foo: 'bar', xyz: 123});
+  });
+});
79 changes: 2 additions & 77 deletions src/json-crdt/log/codec/__tests__/LogEncoder.spec.ts
@@ -5,9 +5,6 @@ import {logEncoderOpts} from '../logEncoderOpts';
 import {LogEncoder} from '../LogEncoder';
 import {Log} from '../../Log';
 import {CborDecoder} from '../../../../json-pack/cbor/CborDecoder';
-// import {CborDecoder} from '../../../json-pack/cbor/CborDecoder';
-// import {FileEncodingParams} from '../types';
-// import {fileEncoders} from '../fileEncoders';
 
 const setup = (view: unknown) => {
   const model = Model.withServerClock();
@@ -21,90 +18,18 @@ describe('.toBinary()', () => {
   describe('can read first value as view', () => {
     test('.ndjson', () => {
       const {encoder, log} = setup({foo: 'bar'});
-      const blob = encoder.toBinary(log, {format: 'ndjson', model: 'compact', history: 'compact'});
+      const blob = encoder.encode(log, {format: 'ndjson', model: 'compact', history: 'compact'});
       const decoder = new JsonDecoder();
       const view = decoder.read(blob);
       expect(view).toEqual({foo: 'bar'});
     });
 
     test('.seq.cbor', () => {
       const {encoder, log} = setup({foo: 'bar'});
-      const blob = encoder.toBinary(log, {format: 'seq.cbor'});
+      const blob = encoder.encode(log, {format: 'seq.cbor'});
       const decoder = new CborDecoder();
       const view = decoder.read(blob);
       expect(view).toEqual({foo: 'bar'});
     });
   });
-
-  // describe('can decode from blob', () => {
-  //   test('.ndjson', () => {
-  //     const {file} = setup({foo: 'bar'});
-  //     const blob = file.toBinary({format: 'ndjson', model: 'compact', history: 'compact'});
-  //     const file2 = File.fromNdjson(blob);
-  //     expect(file2.model.view()).toEqual({foo: 'bar'});
-  //     expect(file2.model !== file.model).toBe(true);
-  //     expect(file.log.start().view()).toEqual(undefined);
-  //     expect(file.log.replayToEnd().view()).toEqual({foo: 'bar'});
-  //   });
-
-  //   test('.seq.cbor', () => {
-  //     const {file} = setup({foo: 'bar'});
-  //     const blob = file.toBinary({format: 'seq.cbor', model: 'binary', history: 'binary'});
-  //     const file2 = File.fromSeqCbor(blob);
-  //     expect(file2.model.view()).toEqual({foo: 'bar'});
-  //     expect(file2.model !== file.model).toBe(true);
-  //     expect(file.log.start().view()).toEqual(undefined);
-  //     expect(file.log.replayToEnd().view()).toEqual({foo: 'bar'});
-  //   });
-  // });
-
-  // const assertEncoding = (file: File, params: FileEncodingParams) => {
-  //   const blob = file.toBinary(params);
-  //   // if (params.format === 'ndjson') console.log(Buffer.from(blob).toString('utf8'))
-  //   const file2 =
-  //     params.format === 'seq.cbor' ? File.fromSeqCbor(blob, fileEncoders) : File.fromNdjson(blob, fileEncoders);
-  //   expect(file2.model.view()).toEqual(file.model.view());
-  //   expect(file2.model !== file.model).toBe(true);
-  //   expect(file2.log.start().view()).toEqual(undefined);
-  //   expect(file2.log.replayToEnd().view()).toEqual(file.model.view());
-  //   expect(file2.log.patches.size()).toBe(file.log.patches.size());
-  // };
-
-  // describe('can encode/decode all format combinations', () => {
-  //   const formats: FileEncodingParams['format'][] = ['ndjson', 'seq.cbor'];
-  //   const modelFormats: FileEncodingParams['model'][] = ['sidecar', 'binary', 'compact', 'verbose'];
-  //   const historyFormats: FileEncodingParams['history'][] = ['binary', 'compact', 'verbose'];
-  //   const noViews = [true, false];
-  //   for (const format of formats) {
-  //     for (const model of modelFormats) {
-  //       for (const history of historyFormats) {
-  //         for (const noView of noViews) {
-  //           if (noView && model === 'sidecar') continue;
-  //           const params = {format, model, history, noView};
-  //           test(JSON.stringify(params), () => {
-  //             const {file} = setup({foo: 'bar'});
-  //             assertEncoding(file, params);
-  //           });
-  //         }
-  //       }
-  //     }
-  //   }
-  // });
 });
-
-// describe('.unserialize()', () => {
-//   test('applies frontier', () => {
-//     const {file, model} = setup({foo: 'bar'});
-//     const clone = model.clone();
-//     clone.api.obj([]).set({
-//       xyz: 123,
-//     });
-//     const serialized = file.serialize({
-//       history: 'binary',
-//     });
-//     serialized.push(clone.api.flush().toBinary());
-//     expect(file.model.view()).toEqual({foo: 'bar'});
-//     const file2 = File.unserialize(serialized, fileEncoders);
-//     expect(file2.model.view()).toEqual({foo: 'bar', xyz: 123});
-//   });
-// });
