Commit d0efd96 (1 parent: e7b59c4)
Showing 31 changed files with 2,336 additions and 0 deletions.
New file (+126 lines): the AnchorFile class.
import AnchorFileModel from './models/AnchorFileModel';
import Compressor from './util/Compressor';
import Encoder from './Encoder';
import ErrorCode from './ErrorCode';
import JsonAsync from './util/JsonAsync';
import Multihash from './Multihash';
import ProtocolParameters from './ProtocolParameters';
import { SidetreeError } from '../../Error';

/**
 * Class containing Anchor File related operations.
 */
export default class AnchorFile {
  /**
   * Parses and validates the given anchor file buffer.
   * @throws `SidetreeError` if parsing or validation fails.
   */
  public static async parseAndValidate (anchorFileBuffer: Buffer, maxOperationsPerBatch: number): Promise<AnchorFileModel> {

    let anchorFileDecompressedBuffer;
    try {
      anchorFileDecompressedBuffer = await Compressor.decompress(anchorFileBuffer);
    } catch {
      throw new SidetreeError(ErrorCode.AnchorFileDecompressionFailure);
    }

    let anchorFile;
    try {
      anchorFile = await JsonAsync.parse(anchorFileDecompressedBuffer);
    } catch {
      throw new SidetreeError(ErrorCode.AnchorFileNotJson);
    }

    // An anchor file must contain only the three known properties validated below.
    const anchorFileProperties = Object.keys(anchorFile);
    if (anchorFileProperties.length > 3) {
      throw new SidetreeError(ErrorCode.AnchorFileHasUnknownProperty);
    }

    if (!anchorFile.hasOwnProperty('batchFileHash')) {
      throw new SidetreeError(ErrorCode.AnchorFileBatchFileHashMissing);
    }

    if (!anchorFile.hasOwnProperty('didUniqueSuffixes')) {
      throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesMissing);
    }

    if (!anchorFile.hasOwnProperty('merkleRoot')) {
      throw new SidetreeError(ErrorCode.AnchorFileMerkleRootMissing);
    }

    // Batch file hash validations.
    if (typeof anchorFile.batchFileHash !== 'string') {
      throw new SidetreeError(ErrorCode.AnchorFileBatchFileHashNotString);
    }

    const batchFileHashBuffer = Encoder.decodeAsBuffer(anchorFile.batchFileHash);
    if (!Multihash.isValidHash(batchFileHashBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) {
      throw new SidetreeError(ErrorCode.AnchorFileBatchFileHashUnsupported, `Batch file hash '${anchorFile.batchFileHash}' is unsupported.`);
    }

    // Merkle root hash validations.
    if (typeof anchorFile.merkleRoot !== 'string') {
      throw new SidetreeError(ErrorCode.AnchorFileMerkleRootNotString);
    }

    const merkleRootBuffer = Encoder.decodeAsBuffer(anchorFile.merkleRoot);
    if (!Multihash.isValidHash(merkleRootBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) {
      throw new SidetreeError(ErrorCode.AnchorFileMerkleRootUnsupported, `Merkle root '${anchorFile.merkleRoot}' is unsupported.`);
    }

    // DID unique suffixes validations.
    if (!Array.isArray(anchorFile.didUniqueSuffixes)) {
      throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesNotArray);
    }

    if (anchorFile.didUniqueSuffixes.length > maxOperationsPerBatch) {
      throw new SidetreeError(ErrorCode.AnchorFileExceededMaxOperationCount);
    }

    if (this.hasDuplicates(anchorFile.didUniqueSuffixes)) {
      throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixesHasDuplicates);
    }

    // Verify each entry in DID unique suffixes.
    for (const uniqueSuffix of anchorFile.didUniqueSuffixes) {
      if (typeof uniqueSuffix !== 'string') {
        throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixEntryNotString);
      }

      const maxEncodedHashStringLength = ProtocolParameters.maxEncodedHashStringLength;
      if (uniqueSuffix.length > maxEncodedHashStringLength) {
        throw new SidetreeError(ErrorCode.AnchorFileDidUniqueSuffixTooLong, `Unique suffix '${uniqueSuffix}' exceeds length of ${maxEncodedHashStringLength}.`);
      }
    }

    return anchorFile;
  }

  /**
   * Checks whether there are duplicates in the given array.
   */
  public static hasDuplicates<T> (array: Array<T>): boolean {
    const uniqueValues = new Set<T>();

    for (let i = 0; i < array.length; i++) {
      const value = array[i];
      if (uniqueValues.has(value)) {
        return true;
      }
      uniqueValues.add(value);
    }

    return false;
  }

  /**
   * Creates a buffer from the input so that the buffer can be persisted.
   */
  public static async createBufferFromAnchorFileModel (anchorFileModel: AnchorFileModel): Promise<Buffer> {

    const anchorFileJson = JSON.stringify(anchorFileModel);
    const anchorFileBuffer = Buffer.from(anchorFileJson);

    return Compressor.compress(anchorFileBuffer);
  }
}
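To make the failure modes concrete, here is a minimal hedged sketch of calling `parseAndValidate` with an invalid buffer. The `10000` operations-per-batch cap and the `code` property on `SidetreeError` are assumptions for illustration, not taken from this commit.

import AnchorFile from './AnchorFile';

// A buffer that is not valid compressed data fails at the very first step,
// surfacing ErrorCode.AnchorFileDecompressionFailure; compressed-but-not-JSON
// data would surface ErrorCode.AnchorFileNotJson instead.
async function demoAnchorFileValidation (): Promise<void> {
  try {
    await AnchorFile.parseAndValidate(Buffer.from('not compressed data'), 10000);
  } catch (error) {
    console.info(error.code); // Assumed: SidetreeError exposes the ErrorCode as `code`.
  }
}

demoAnchorFileValidation();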
New file (+35 lines): the AnchoredOperation class.
import AnchoredOperationModel from '../../models/AnchoredOperationModel';
import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel';
import Operation from './Operation';

/**
 * A class that represents an anchored Sidetree operation.
 */
export default class AnchoredOperation extends Operation implements NamedAnchoredOperationModel {
  /** The index this operation was assigned to in the batch. */
  public readonly operationIndex: number;
  /** The transaction number of the transaction this operation was batched within. */
  public readonly transactionNumber: number;
  /** The logical blockchain time at which this operation was anchored on the blockchain. */
  public readonly transactionTime: number;

  /**
   * Constructs an anchored operation if the operation buffer passes schema validation, throws error otherwise.
   */
  private constructor (anchoredOperationModel: AnchoredOperationModel) {
    super(anchoredOperationModel.operationBuffer);

    // Properties of an operation in a resolved transaction.
    this.operationIndex = anchoredOperationModel.operationIndex;
    this.transactionNumber = anchoredOperationModel.transactionNumber;
    this.transactionTime = anchoredOperationModel.transactionTime;
  }

  /**
   * Validates and creates an anchored operation that has been anchored on the blockchain.
   * @throws Error if the given operation buffer fails any validation.
   */
  public static createAnchoredOperation (anchoredOperationModel: AnchoredOperationModel): AnchoredOperation {
    return new AnchoredOperation(anchoredOperationModel);
  }
}
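For illustration, a hedged sketch of constructing an anchored operation. The operation buffer is left abstract because the schema validation in the `Operation` base class is outside this diff, and the numeric values are placeholders.

import AnchoredOperation from './AnchoredOperation';
import AnchoredOperationModel from '../../models/AnchoredOperationModel';

// Stands in for a buffer that passes the base-class schema validation (not shown here).
declare const someValidOperationBuffer: Buffer;

const anchoredOperationModel: AnchoredOperationModel = {
  operationBuffer: someValidOperationBuffer,
  operationIndex: 0,       // position within its batch
  transactionNumber: 42,   // placeholder transaction number
  transactionTime: 500000  // placeholder logical blockchain time
};

const anchoredOperation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel);
console.info(anchoredOperation.transactionNumber); // 42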
New file (+116 lines): the BatchFile class.
import AnchoredOperation from './AnchoredOperation';
import AnchoredOperationModel from '../../models/AnchoredOperationModel';
import AnchorFileModel from './models/AnchorFileModel';
import BatchFileModel from './models/BatchFileModel';
import Compressor from './util/Compressor';
import Encoder from './Encoder';
import JsonAsync from './util/JsonAsync';
import NamedAnchoredOperationModel from '../../models/NamedAnchoredOperationModel';
import ProtocolParameters from './ProtocolParameters';
import timeSpan = require('time-span');

/**
 * Defines the schema of a Batch File and its related operations.
 * NOTE: Must NOT add properties not defined by the Sidetree protocol.
 */
export default class BatchFile {
  /**
   * Parses and validates the given batch file buffer and all the operations within it.
   * @throws Error if parsing or validation fails.
   */
  public static async parseAndValidate (
    batchFileBuffer: Buffer,
    anchorFile: AnchorFileModel,
    transactionNumber: number,
    transactionTime: number
  ): Promise<NamedAnchoredOperationModel[]> {

    let endTimer = timeSpan();
    const decompressedBatchFileBuffer = await Compressor.decompress(batchFileBuffer);
    const batchFileObject = await JsonAsync.parse(decompressedBatchFileBuffer);
    console.info(`Parsed batch file ${anchorFile.batchFileHash} in ${endTimer.rounded()} ms.`);

    // Ensure only properties specified by the Sidetree protocol are given.
    const allowedProperties = new Set(['operations']);
    for (const property in batchFileObject) {
      if (!allowedProperties.has(property)) {
        throw new Error(`Unexpected property ${property} in batch file.`);
      }
    }

    // Make sure operations is an array.
    if (!(batchFileObject.operations instanceof Array)) {
      throw new Error('Invalid batch file, operations property is not an array.');
    }

    // Make sure all operations are strings.
    batchFileObject.operations.forEach((operation: any) => {
      if (typeof operation !== 'string') {
        throw new Error('Invalid batch file, operations property is not an array of strings.');
      }
    });

    const batchFile = batchFileObject as BatchFileModel;
    const batchSize = batchFile.operations.length;

    // Verify the number of operations does not exceed the maximum allowed limit.
    if (batchSize > ProtocolParameters.maxOperationsPerBatch) {
      throw new Error(`Batch size of ${batchSize} operations exceeds the allowed limit of ${ProtocolParameters.maxOperationsPerBatch}.`);
    }

    // Verify that the operation count matches that of the anchor file.
    const operationCountInAnchorFile = anchorFile.didUniqueSuffixes.length;
    if (batchSize !== operationCountInAnchorFile) {
      throw new Error(`Batch size of ${batchSize} in batch file '${anchorFile.batchFileHash}' does not match the size of ${operationCountInAnchorFile} in the anchor file.`);
    }

    endTimer = timeSpan();
    const namedAnchoredOperationModels: NamedAnchoredOperationModel[] = [];

    for (let operationIndex = 0; operationIndex < batchSize; operationIndex++) {
      const encodedOperation = batchFile.operations[operationIndex];
      const operationBuffer = Encoder.decodeAsBuffer(encodedOperation);

      // Verify that the size of each operation does not exceed the maximum allowed limit.
      if (operationBuffer.length > ProtocolParameters.maxOperationByteSize) {
        throw new Error(`Operation size of ${operationBuffer.length} bytes exceeds the allowed limit of ${ProtocolParameters.maxOperationByteSize} bytes.`);
      }

      const anchoredOperationModel: AnchoredOperationModel = {
        operationBuffer,
        operationIndex,
        transactionNumber,
        transactionTime
      };

      const operation = AnchoredOperation.createAnchoredOperation(anchoredOperationModel);

      // Operations must appear in the same order as their DID unique suffixes in the anchor file.
      const didUniqueSuffixInAnchorFile = anchorFile.didUniqueSuffixes[operationIndex];
      if (operation.didUniqueSuffix !== didUniqueSuffixInAnchorFile) {
        throw new Error(`Operation ${operationIndex}'s DID unique suffix '${operation.didUniqueSuffix}' ` +
          `is not the same as '${didUniqueSuffixInAnchorFile}' seen in the anchor file.`);
      }

      namedAnchoredOperationModels.push(operation);
    }
    console.info(`Decoded ${batchSize} operations in batch ${anchorFile.batchFileHash}. Time taken: ${endTimer.rounded()} ms.`);

    return namedAnchoredOperationModels;
  }

  /**
   * Creates the Batch File buffer from an array of operation buffers.
   * @param operationBuffers Operation buffers in JSON serialized form, NOT encoded in any way.
   * @returns The Batch File buffer.
   */
  public static async fromOperationBuffers (operationBuffers: Buffer[]): Promise<Buffer> {
    const operations = operationBuffers.map((operation) => {
      return Encoder.encode(operation);
    });

    const rawData = JSON.stringify({ operations });
    const compressedRawData = await Compressor.compress(Buffer.from(rawData));

    return compressedRawData;
  }
}
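As a hedged sketch of the serialization side, the snippet below builds a batch file buffer from two made-up operation buffers. The operation shapes are placeholders, since the real operation schema is enforced elsewhere and not shown in this diff.

import BatchFile from './BatchFile';

// Two hypothetical operation buffers in JSON-serialized form (NOT encoded).
const operationBuffers = [
  Buffer.from(JSON.stringify({ header: { operation: 'create' } })), // placeholder shape
  Buffer.from(JSON.stringify({ header: { operation: 'update' } }))  // placeholder shape
];

async function demoBatchFileCreation (): Promise<void> {
  // The decompressed content of the resulting buffer is
  // {"operations":["<encoded op 1>","<encoded op 2>"]}.
  const batchFileBuffer = await BatchFile.fromOperationBuffers(operationBuffers);
  console.info(`Batch file buffer is ${batchFileBuffer.length} bytes.`);
}

demoBatchFileCreation();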
New file (+71 lines): the BatchWriter class.
import AnchorFile from './AnchorFile';
import AnchorFileModel from './models/AnchorFileModel';
import BatchFile from './BatchFile';
import Encoder from './Encoder';
import IBatchWriter from '../../interfaces/IBatchWriter';
import IBlockchain from '../../interfaces/IBlockchain';
import ICas from '../../interfaces/ICas';
import IOperationQueue from './interfaces/IOperationQueue';
import MerkleTree from './util/MerkleTree';
import Multihash from './Multihash';
import Operation from './Operation';
import ProtocolParameters from './ProtocolParameters';

/**
 * Implementation of the `IBatchWriter`.
 */
export default class BatchWriter implements IBatchWriter {
  public constructor (
    private operationQueue: IOperationQueue,
    private blockchain: IBlockchain,
    private cas: ICas) { }

  public async write () {
    // Get the batch of operations to be anchored on the blockchain.
    const operationBuffers = await this.operationQueue.peek(ProtocolParameters.maxOperationsPerBatch);

    console.info(`Batch size = ${operationBuffers.length}`);

    // Do nothing if there is nothing to batch together.
    if (operationBuffers.length === 0) {
      return;
    }

    const batch = operationBuffers.map(
      (buffer) => Operation.create(buffer)
    );

    // Create the batch file buffer from the operation batch.
    const batchFileBuffer = await BatchFile.fromOperationBuffers(operationBuffers);

    // Write the 'batch file' to the content addressable store.
    const batchFileHash = await this.cas.write(batchFileBuffer);
    console.info(`Wrote batch file ${batchFileHash} to content addressable store.`);

    // Compute the Merkle root hash. 18 (0x12) is the multihash code for SHA-256.
    const merkleRoot = MerkleTree.create(operationBuffers).rootHash;
    const merkleRootAsMultihash = Multihash.encode(merkleRoot, 18);
    const encodedMerkleRoot = Encoder.encode(merkleRootAsMultihash);

    // Collect the DID unique suffix of each operation to be included in the anchor file.
    const didUniqueSuffixes = batch.map(operation => operation.didUniqueSuffix);

    // Construct the 'anchor file'.
    const anchorFileModel: AnchorFileModel = {
      batchFileHash: batchFileHash,
      merkleRoot: encodedMerkleRoot,
      didUniqueSuffixes
    };

    // Make the 'anchor file' available in the content addressable store.
    const anchorFileJsonBuffer = await AnchorFile.createBufferFromAnchorFileModel(anchorFileModel);
    const anchorFileAddress = await this.cas.write(anchorFileJsonBuffer);
    console.info(`Wrote anchor file ${anchorFileAddress} to content addressable store.`);

    // Anchor the 'anchor file hash' on the blockchain.
    await this.blockchain.write(anchorFileAddress);

    // Remove the written operations from the queue since batch writing succeeded.
    await this.operationQueue.dequeue(batch.length);
  }
}
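How the writer is wired up is not part of this diff. The following is a minimal sketch, assuming concrete implementations of the three injected interfaces exist elsewhere and that the host drives `write()` on a timer.

import BatchWriter from './BatchWriter';
import IBlockchain from '../../interfaces/IBlockchain';
import ICas from '../../interfaces/ICas';
import IOperationQueue from './interfaces/IOperationQueue';

// Assumed to be provided by the host; concrete classes are not defined in this commit.
declare const operationQueue: IOperationQueue;
declare const blockchain: IBlockchain;
declare const cas: ICas;

const batchWriter = new BatchWriter(operationQueue, blockchain, cas);

// Drain the queue periodically: each invocation batches queued operations,
// writes the batch and anchor files to CAS, and anchors the address on chain.
setInterval(() => batchWriter.write().catch(console.error), 60 * 1000);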
New file (+46 lines): the Did class.
import Encoder from './Encoder';
import Multihash from './Multihash';

/**
 * Class containing reusable DID related operations.
 */
export default class Did {
  /**
   * Calculates the DID from the given DID Document.
   */
  public static from (encodedDidDocument: string, didMethodName: string, hashAlgorithmAsMultihashCode: number): string {
    const didUniqueSuffix = Did.getUniqueSuffixFromEncodeDidDocument(encodedDidDocument, hashAlgorithmAsMultihashCode);
    const did = didMethodName + didUniqueSuffix;
    return did;
  }

  /**
   * Gets the unique portion of the DID generated from an encoded DID Document, e.g. "did:sidetree:12345" -> "12345".
   */
  public static getUniqueSuffixFromEncodeDidDocument (encodedDidDocument: string, hashAlgorithmAsMultihashCode: number): string {
    const didDocumentBuffer = Buffer.from(encodedDidDocument);
    const multihash = Multihash.hash(didDocumentBuffer, hashAlgorithmAsMultihashCode);
    const encodedMultihash = Encoder.encode(multihash);
    return encodedMultihash;
  }

  /**
   * Checks whether the given string is a valid generic DID.
   */
  public static isDid (did: string): boolean {
    if (!did.startsWith('did:')) {
      return false;
    }

    const uniqueSuffixWithMethodName = did.substring(4); // e.g. 'sidetree:abc'
    const indexOfColonChar = uniqueSuffixWithMethodName.indexOf(':');

    // ':' must exist and must not be the first or last character.
    if (indexOfColonChar <= 0 ||
        indexOfColonChar === (uniqueSuffixWithMethodName.length - 1)) {
      return false;
    }

    return true;
  }
}
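A short usage sketch: the `isDid` results below follow directly from the string checks above, while the method name string and the truncated encoded document passed to `from` are placeholders.

import Did from './Did';

// Pure string validation, no hashing involved:
console.info(Did.isDid('did:sidetree:EiAJ3zH8'));   // true
console.info(Did.isDid('did:sidetree:'));           // false: ':' is the last character
console.info(Did.isDid('sidetree:EiAJ3zH8'));       // false: missing 'did:' prefix

// Deriving a DID; 18 (0x12) is the multihash code for SHA-256.
const did = Did.from('eyJAY29udGV4dCI6Li4u', 'did:sidetree:', 18);
console.info(did); // 'did:sidetree:' followed by the encoded multihash of the document.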